I have a CSV script that runs within a sequence on a set of gathered URLs, like so: threaded(urls, write_csv, num_threads=5). The script writes to the CSV, but it seems to rewrite the first row for each URL rather than writing a new row for each subsequent URL. I'm new to Python — do I need to add a counter and index to accomplish this, or restructure the program entirely? Here's the relevant code; thanks in advance:
import csv
import os
import threading

from thready import threaded
def get_links():
    """Collect the target URLs and fan them out to write_csv on a worker pool."""
    # gather urls
    # Five workers each call write_csv(url) concurrently.
    threaded(urls, write_csv, num_threads=5)
# Serializes access to the shared CSV file: threaded() invokes write_csv from
# several worker threads at once, and interleaved writes would corrupt rows.
_csv_lock = threading.Lock()

_CSV_PATH = "local/file1.csv"


def write_csv(url):
    """Append one scraped-product row for *url* to local/file1.csv.

    Bug fixed: the original opened the file with mode "w" on every call, which
    truncates the file each time — so every URL overwrote the previous row and
    re-emitted the header. Opening in append mode ("a") and writing the header
    only when the file is absent or empty makes each call add a new row; no
    counter or index is needed.

    NOTE(review): the scraped fields (final_model_num, final_name,
    final_description, final_price, final_first_image, final_images) are
    assumed to be module-level globals set by the scraping code — confirm.

    Args:
        url: the URL that was scraped; recorded in the 'scrapeUrl' column.
    """
    data = {
        'scrapeUrl': url,
        'model': final_model_num,
        'title': final_name,
        'description': final_description,
        'price': str(final_price),
        'image': final_first_image,
        'additional_image': final_images,
        'quantity': '1',
        'subtract': '1',
        'minimum': '1',
        'status': '1',
        'shipping': '1'
    }
    with _csv_lock:
        # Emit the header exactly once: only when the file doesn't exist yet
        # or is still empty.
        need_header = (not os.path.isfile(_CSV_PATH)
                       or os.path.getsize(_CSV_PATH) == 0)
        with open(_CSV_PATH, "a") as f:
            writer = csv.writer(f, delimiter=",")
            if need_header:
                writer.writerow(list(data.keys()))
            # NOTE(review): .encode(...) is a Python-2 idiom; under Python 3
            # this writes b'...' reprs into the CSV — drop the encode there.
            writer.writerow([s.encode('ascii', 'ignore') for s in data.values()])
if __name__ == "__main__":
    # Script entry point: gather the URLs and start the threaded CSV writes.
    get_links()