raus.py

import sys


def remove_duplicate_lines(filepath):
    # Read the file and split it into lines
    with open(filepath, 'r') as file:
        lines = file.readlines()

    # Remove duplicates by converting the list of lines to a set, then back to a list.
    # Note: sets do not maintain order, so the original line order is lost.
    unique_lines = list(set(lines))

    # Sort the lines (optional; the result is written in sorted order)
    unique_lines.sort()

    # Write the unique lines back to the file
    with open(filepath, 'w') as file:
        file.writelines(unique_lines)


if __name__ == "__main__":
    # Get the filepath from the command line arguments
    if len(sys.argv) < 2:
        print("Usage: python raus.py <path_to_file>")
        sys.exit(1)

    file_to_process = sys.argv[1]
    print(f"Processing file: {file_to_process}")
    remove_duplicate_lines(file_to_process)
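
If you want to drop duplicates while keeping the original line order instead of sorting, a minimal sketch of an alternative function could look like the following. This is not part of raus.py; the name remove_duplicate_lines_keep_order is made up for illustration, and it relies on dict.fromkeys preserving insertion order (Python 3.7+).

def remove_duplicate_lines_keep_order(filepath):
    # dict.fromkeys keeps only the first occurrence of each line and
    # preserves the order in which lines were first seen.
    with open(filepath, 'r') as file:
        unique_lines = list(dict.fromkeys(file.readlines()))

    with open(filepath, 'w') as file:
        file.writelines(unique_lines)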