import subprocess
from multiprocessing import Pool, cpu_count
import pandas as pd
def multi_processor(function_name, file_path, output_format):
    """Run ``your_function`` over a batch of files in parallel worker processes.

    Args:
        function_name: Unused; kept for backward compatibility with callers.
        file_path: Directory that would be searched for CSV files (the search
            is currently stubbed out with hard-coded test paths below).
        output_format: File extension forwarded to ``your_function``
            (e.g. ``".xlsx"``).

    Returns:
        dict mapping each input file path to the string returned by
        ``your_function`` for that file ("Success!" or an error message).
    """
    # Real file discovery (currently disabled in favour of the test stub):
    # file_list = str(subprocess.check_output(
    #     f"find {file_path} -type f -iname \"*.csv*\" ", shell=True)).split('\\n')
    # file_list = sorted(file_list)
    # Test stub: six dummy paths so your_function runs six times in parallel.
    file_list = [f"file_path{i}" for i in range(1, 7)]
    # Use all system processors but one, and never fewer than one worker:
    # Pool(processes=0) raises ValueError on a single-core machine.
    # (The previous `pool.daemon = True` was dropped — Pool objects have no
    # daemon semantics; that attribute only exists on Process.)
    pool = Pool(processes=max(1, cpu_count() - 1))
    results = {}
    try:
        # Dispatch one asynchronous task per file.
        for each_file in file_list:
            results[each_file] = pool.apply_async(
                your_function, args=(output_format, each_file))
        # Stop accepting new work, then wait for every task to finish.
        pool.close()
        pool.join()
    finally:
        # Safe on an already-joined pool; guarantees workers are reaped
        # even if dispatch raised part-way through.
        pool.terminate()
    # .get() would re-raise worker exceptions, but your_function catches its
    # own, so each value is either "Success!" or an error string.
    return {file_name: result.get() for file_name, result in results.items()}
def your_function(output_format, file_name):
    """Convert one CSV file to an Excel workbook.

    Args:
        output_format: Extension appended to the source path (e.g. ``".xlsx"``),
            producing the output file ``f"{file_name}{output_format}"``.
        file_name: Path of the CSV file to read.

    Returns:
        "Success!" on success, otherwise the exception message as a string.
        Errors are returned rather than raised so the parent pool's
        ``result.get()`` never re-raises from a worker.
    """
    try:
        df = pd.read_csv(file_name)
        # The context manager saves and closes the workbook on exit.
        # (ExcelWriter.save() was removed in pandas 2.0, so the previous
        # explicit writer.save() call fails on current pandas.)
        with pd.ExcelWriter(f"{file_name}{output_format}") as writer:
            df.to_excel(writer)
        return "Success!"
    except Exception as e:
        return str(e)
if __name__ == "__main__":
    # Kick off the parallel CSV -> Excel conversion and report per-file status.
    conversion_results = multi_processor(
        "your_function", "some_path_to_csv_files", ".xlsx")
    print(conversion_results)