How to optimize this script
I have written the following script. It opens a file, reads it line by line, and deletes the first character of each line. If the resulting line is non-empty, it is appended to an array. Then each element of the array is split on whitespace, sorted alphabetically, and joined again. Every line is printed, because the script is run from the console and writes everything to a file via standard output. I'd like to make this code more Pythonic. Any ideas?
import sys

def main():
    filename = sys.argv[1]
    file = open(filename)
    arr = []
    for line in file:
        line = line[1:].replace("\n", "")
        if line:
            arr.append(line)
    for line in arr:
        lines = line.split(" ")
        lines.sort(key=str.lower)
        line = ''.join(lines)
        print line

if __name__ == '__main__':
    main()
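I run it from the console roughly like this (the script and file names here are only placeholders):

    python sortwords.py input.txt > output.txt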
import sys

def main():
    file = open(sys.argv[1])
    for line in file:
        if line.rstrip():
            print ''.join(sorted(line[1:-1].split(), key=str.lower))
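What makes the one-liner possible is that sorted() returns a new list, while list.sort() sorts in place and returns None. A minimal illustration (the sample words are made up):

    words = ['pear', 'Apple', 'banana']
    print sorted(words, key=str.lower)   # new list: ['Apple', 'banana', 'pear']
    words.sort(key=str.lower)            # sorts in place, returns None
    print words                          # ['Apple', 'banana', 'pear']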
Why create the list arr? The file is already a sequence, so you are building arr and then doing nothing with it except iterating over it again. Process each line as you read it:
for line in file:
    line = line[1:].replace("\n", "")
    if not line: continue
    lines = line.split(" ")
    lines.sort(key=str.lower)
    line = ''.join(lines)
    print line
You can condense the second loop into the first one:
import sys

def main():
    filename = sys.argv[1]
    file = open(filename)
    for line in file:
        line = line[1:].strip()  # drop the first character and the trailing newline
        if line:
            lines = line.split(" ")
            lines.sort(key=str.lower)
            print ''.join(lines)

if __name__ == '__main__':
    main()
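As a side note, none of these versions closes the file explicitly. A sketch of the same loop with a with statement (assuming Python 2.6 or newer), which closes the file automatically:

    import sys

    def main():
        with open(sys.argv[1]) as f:  # f is closed when the block exits
            for line in f:
                line = line[1:].strip()
                if line:
                    words = line.split(" ")
                    words.sort(key=str.lower)
                    print ''.join(words)

    if __name__ == '__main__':
        main()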
for small files:
import fileinput

lines = []
for line in fileinput.input():
    line = line[1:].strip()
    if line:
        words = line.split()
        words.sort(key=str.lower)
        lines.append(' '.join(words))
print '\n'.join(lines)
for big files:
import fileinput

for line in fileinput.input():
    line = line[1:].strip()
    if line:
        words = line.split()
        words.sort(key=str.lower)
        print ' '.join(words)
import fileinput

def main():
    for line in fileinput.input():
        words = line[1:].split()  # strip() is redundant: split() already ignores surrounding whitespace and the newline
        if words:
            words.sort(key=str.lower)
            print ' '.join(words)

if __name__ == "__main__":
    main()
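To see why the strip() is redundant: str.split() with no argument splits on runs of whitespace and ignores leading and trailing whitespace, including the newline. A quick check:

    line = "  banana  Apple cherry \n"
    print line.split()            # ['banana', 'Apple', 'cherry']
    print line.strip().split()    # the same result, so strip() adds nothing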