Isolated coop-loading functionality

This commit is contained in:
Sam
2021-02-26 11:46:40 +00:00
parent 86020afd27
commit 0f7af9c0fb
4 changed files with 42 additions and 29 deletions

View File

@@ -13,6 +13,8 @@ from rest_framework_simplejwt.tokens import RefreshToken
from tagulous.models import TagModel from tagulous.models import TagModel
from companies.models import Company
User = get_user_model() User = get_user_model()
@@ -98,3 +100,40 @@ def reformat_google_taxonomy(file_name):
line = line.replace(' > ', '/') line = line.replace(' > ', '/')
destination_file.write(line) destination_file.write(line)
def coop_loader(csv_reader, request):
    """
    Parse CSV rows and, for each valid row, create:
      - a Company (coop) record
      - an inactive COOP_MANAGER user linked to it, who is then sent a
        verification email

    Rows missing any of 'cif', 'nombre-coop' or 'email' are skipped and
    logged; rows that fail during creation are likewise logged and skipped
    so one bad row does not abort the whole import.

    Args:
        csv_reader: csv.DictReader over the uploaded file; expected columns
            are 'cif', 'nombre-coop', 'nombre-corto', 'es-tienda', 'url'
            and 'email'.
        request: the incoming HTTP request, forwarded to
            send_verification_email (presumably to build the absolute
            confirmation link — confirm against that helper).

    Returns:
        tuple[int, int]: (coop_counter, user_counter) — number of companies
        and number of manager users actually created.
    """
    coop_counter = 0
    user_counter = 0
    for row in csv_reader:
        # Required fields must be non-empty; skip (don't abort) otherwise.
        if '' in (row['cif'], row['nombre-coop'], row['email']):
            logging.error(f"Required data missing: {row}")
            continue
        try:
            coop_data = {
                'cif': row['cif'].strip(),
                'company_name': row['nombre-coop'].strip(),
                'short_name': row['nombre-corto'].strip(),
                # NOTE(review): bool() of a stripped string is True for ANY
                # non-empty value, including "no" or "0" — confirm the CSV
                # only ever uses empty/non-empty to encode this flag.
                'shop': bool(row['es-tienda'].strip()),
                'shop_link': row['url'].strip(),
            }
            coop = Company.objects.create(**coop_data)
            logging.info(f"Created Coop: {coop_data}")
            coop_counter += 1
            # Strip the email like every other field so stray whitespace in
            # the CSV cannot produce a malformed address.
            coop_user = User.objects.create_user(email=row['email'].strip(), company=coop, role='COOP_MANAGER', is_active=False)
            # send confirmation email
            send_verification_email(request, coop_user)
            logging.info(f"Created User: {coop_user}")
            user_counter += 1
        except Exception as e:
            # Include the exception so failures are diagnosable; matches the
            # f'{type(e)}: {e}' error style used by the calling view.
            logging.error(f"Could not parse {row}: {type(e)}: {e}")
    return coop_counter, user_counter

View File

@@ -183,35 +183,11 @@ def load_coop_managers(request):
logging.info(f"Reading contents of {csv_file.name}") logging.info(f"Reading contents of {csv_file.name}")
decoded_file = csv_file.read().decode('utf-8').splitlines() decoded_file = csv_file.read().decode('utf-8').splitlines()
csv_reader = csv.DictReader(decoded_file, delimiter=',') csv_reader = csv.DictReader(decoded_file, delimiter=',')
coop_counter = 0 coop_count, user_count = utils.coop_loader(csv_reader, request)
user_counter = 0
for row in csv_reader:
if '' in (row['cif'], row['nombre-coop'], row['email']):
logging.error(f"Required data missing: {row}")
continue
try:
coop_data = {
'cif': row['cif'].strip(),
'company_name': row['nombre-coop'].strip(),
'short_name': row['nombre-corto'].strip(),
'shop': bool(row['es-tienda'].strip()),
'shop_link': row['url'].strip(),
}
coop = Company.objects.create(**coop_data)
logging.info(f"Created Coop: {coop_data}")
coop_counter += 1
coop_user = User.objects.create_user(email=row['email'], company=coop, role='COOP_MANAGER', is_active=False) return Response({'details': f"Created {coop_count} Companies, {user_count} Managing Users"})
# send confirmation email
utils.send_verification_email(request, coop_user)
logging.info(f"Created User: {coop_user}")
user_counter += 1
except Exception as e:
logging.error(f"Could not parse {row}")
return Response()
except Exception as e: except Exception as e:
return Response({"errors": {"details": str(type(e))}}) return Response({"errors": {"details": f'{type(e)}: {e}'}})
@api_view(['GET',]) @api_view(['GET',])

View File

@@ -533,7 +533,6 @@ class ProductSearchTest(TestCase):
# check prices # check prices
self.assertTrue(payload['prices']['min'] <= payload['prices']['max']) self.assertTrue(payload['prices']['min'] <= payload['prices']['max'])
def test_anon_user_can_paginate_search(self): def test_anon_user_can_paginate_search(self):
expected_instances = [ expected_instances = [
self.factory(tags="lunares/rojos", category='zapatos', description="zapatos verdes"), self.factory(tags="lunares/rojos", category='zapatos', description="zapatos verdes"),

View File

@@ -212,7 +212,6 @@ def product_search(request):
for chunk in chunks: for chunk in chunks:
product_set, min_price, max_price = find_related_products_v6(chunk, shipping_cost, discount, category, tags, price_min, price_max) product_set, min_price, max_price = find_related_products_v6(chunk, shipping_cost, discount, category, tags, price_min, price_max)
# update price values # update price values
# import ipdb; ipdb.set_trace()
if prices['min'] is None or min_price['price__min'] < prices['min']: if prices['min'] is None or min_price['price__min'] < prices['min']:
prices['min'] = min_price['price__min'] prices['min'] = min_price['price__min']
if prices['max'] is None or max_price['price__max'] > prices['max']: if prices['max'] is None or max_price['price__max'] > prices['max']: