Commit 1ed8d9d1 by Rémi - Le Filament

[MOV] agg table name

parent c5b216c1
No related branches found
No related tags found
1 merge request: !2 Courbes agrégées
@@ -8,8 +8,10 @@ from datetime import datetime, timedelta
 from dateutil.relativedelta import relativedelta
 from odoo import fields, models
+from odoo.osv import expression
 from odoo.addons.api_connector.tools.date_utils import local_to_utc
+from odoo.addons.oacc.models.acc_enedis_cdc_day import COMP_DATA_TYPE_MAP


 class AccOperationImportWizard(models.TransientModel):
@@ -55,7 +57,7 @@ class AccOperationImportWizard(models.TransientModel):
     # ------------------------------------------------------
     # Business methods
     # ------------------------------------------------------
-    def delete_existing_data(self, data_filename, counter_id):
+    def _delete_existing_data(self, data_filename, counter_id):
         message = ""
         # Suppression des données si déjà existantes
         date_begin_str = data_filename[1]
@@ -78,35 +80,51 @@
         computed_data_type = "autocons"
         domain_all = [
+            ("acc_operation_id", "=", self.operation_id.id),
+            ("acc_counter_id", "=", counter_id),
+        ]
+        domain_cdc = expression.AND([domain_all, [
             ("date_slot", ">=", start_datetime),
             ("date_slot", "<", end_datetime),
-            ("acc_operation_id", "=", self.operation_id.id),
             ("comp_data_type", "=", computed_data_type),
-            ("acc_counter_id", "=", counter_id.id),
-        ]
-        rec_ids = self.env[self.model].search(domain_all)
+        ]])
+        domain_cdc_day = expression.AND([domain_all, [
+            ("date_slot", ">=", date_begin_obj.date()),
+            ("date_slot", "<", date_end_obj.date()),
+            ("comp_data_type", "=", COMP_DATA_TYPE_MAP[computed_data_type]),
+        ]])
+        rec_ids = self.env[self.model].search(domain_cdc)
         if rec_ids:
-            message += "Suppression des enregistrements existants ... <br/>"
+            message += "Suppression des enregistrements existants de courbes ... <br/>"
             rec_ids.unlink()
-            message += "Suppression des enregistrements existants OK <br/>"
+            message += "Suppression des enregistrements existants de courbes OK <br/>"
+        if self.model == "acc.enedis.cdc":
+            day_rec_ids = self.env["acc.enedis.cdc.day"].search(domain_cdc_day)
+            if day_rec_ids:
+                message += "Suppression des enregistrements existants de courbes "\
+                    "agrégées au jour ... <br/>"
+                day_rec_ids.unlink()
+                message += "Suppression des enregistrements existants de courbes "\
+                    "agrégées au jour OK <br/>"
         return message

     def create_curve(self, curve_data):
         self.env[self.model].create(curve_data)

-    def update_partner_id(self, data_filename, counter_id):
+    def _update_partner_id(self, data_filename, counter_id):
         date_begin_str = data_filename[1]
         date_end_str = data_filename[2]
         date_format = "%d%m%Y"
-        date_begin_obj = datetime.strptime(date_begin_str, date_format)
+        date_begin_obj = datetime.strptime(date_begin_str, date_format).date()
         date_end_obj = datetime.strptime(date_end_str, date_format) + relativedelta(
             days=1
-        )
+        ).date()
         # Update partner_id for retrieved cdc
         domain = [
             ("acc_operation_id", "=", self.operation_id.id),
-            ("acc_counter_id", "=", counter_id.id or False),
+            ("acc_counter_id", "=", counter_id),
             (
                 "prm_type",
                 "=",
@@ -115,10 +133,10 @@ class AccOperationImportWizard(models.TransientModel):
         ]
         self.env["acc.counter.period"]._get_periods_from_interval(
             domain, date_begin_obj, date_end_obj
-        )._update_cdc_partner_id(
+        ).update_cdc_partner_id(
             model=self.model,
-            date_start=date_begin_obj.date(),
-            date_end=date_end_obj.date(),
+            date_start=date_begin_obj,
+            date_end=date_end_obj,
         )
@@ -131,9 +149,6 @@ def valid_import(self):
             + "</h1>"
         )
-        start_dates = []
-        end_dates = []
-
         for file in self.attachment_ids:
             message += (
                 "<p><strong>Fichier "
@@ -143,16 +158,16 @@
             data_filename = file.name.split("_")
             date_format = "%d%m%Y"
             id_pdm = data_filename[0]
-            start_dates.append(datetime.strptime(data_filename[1], date_format))
-            end_dates.append(datetime.strptime(data_filename[2], date_format))
-            counter_id = self.env["acc.counter"].search([("name", "=", id_pdm)])
+            start_date = datetime.strptime(data_filename[1], date_format)
+            end_date = datetime.strptime(data_filename[2], date_format)
+            counter_id = self.env["acc.counter"].search([("name", "=", id_pdm)]).id
             data_filename[3] = data_filename[3].lower()
             if data_filename[3] not in ["prod", "surplus"]:
                 data_filename[3] = data_filename[3][:-1]
-            message += self.delete_existing_data(
+            message += self._delete_existing_data(
                 data_filename=data_filename, counter_id=counter_id
             )
@@ -163,6 +178,8 @@
             csv_reader = csv.reader(file_decode, delimiter=";")
             file_reader.extend(csv_reader)

+            curves_to_create = []
+
             # Create Data for the CDC
             message += "Lecture et import des données ... <br/>"
             for row in file_reader:
@@ -186,11 +203,11 @@
                 else:
                     timestamp = timestamp + timedelta(minutes=timestep)
-                self.create_curve(
+                curves_to_create.append(
                     {
                         "name": file.name,
                         "acc_operation_id": self.operation_id.id,
-                        "acc_counter_id": counter_id.id,
+                        "acc_counter_id": counter_id,
                         "comp_data_type": data_filename[3],
                         "power": power,
                         "date_slot": timestamp,
@@ -198,7 +215,18 @@
                     }
                 )
-            self.update_partner_id(data_filename, counter_id)
+            message += "Création des courbes<br/>"
+            self.create_curve(curves_to_create)
+
+            message += "Mise à jour du contact associé aux courbes<br/>"
+            self._update_partner_id(data_filename, counter_id)
+            message += "Génération des données agrégées au jour<br/>"
+            self.env["acc.enedis.cdc.day"].generate(
+                acc_operation_id=self.operation_id.id,
+                acc_counter_id=counter_id,
+                start_date=start_date,
+                end_date=end_date,
+            )
             message += "Fin de l'import des données OK<br/>"

             # Suppression du fichier après création des enregistrements
@@ -206,24 +234,6 @@
             file.unlink()
             message += "Suppression OK </p>"

-        message += "Génération des données agrégées<br/>"
-        domain = [
-            ("acc_operation_id", "=", self.operation_id.id),
-            ("date_slot", ">=", min(start_dates)),
-            ("date_slot", "<", max(end_dates)),
-        ]
-        # Vérification si des données existent déjà
-        # pour cet intervalle de date
-        rec_ids = self.env["acc.enedis.cdc.agg"].search(domain)
-        if rec_ids:
-            rec_ids.unlink()
-        self.env["acc.enedis.cdc.agg"].generate(
-            acc_operation_id=self.operation_id.id,
-            start_date=min(start_dates),
-            end_date=max(end_dates),
-        )
-
         message += "<h1>Fin import manuel: " + str(fields.Datetime.now()) + "</h1>"

         # Logs information logs
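Note on the import loop: rows are now accumulated in `curves_to_create` and written with a single `create_curve()` call per file. Since Odoo 12, `create()` accepts a list of value dicts and creates the whole batch in one pass, which avoids one write per CSV row. A minimal sketch of that batch-create pattern, with illustrative values only:

    # One dict per CSV row, collected during the loop (values are examples).
    curves_to_create = [
        {"name": "PRM_01012024_31012024_prod.csv",
         "power": 0.5, "date_slot": "2024-01-01 00:00:00"},
        {"name": "PRM_01012024_31012024_prod.csv",
         "power": 0.7, "date_slot": "2024-01-01 00:30:00"},
    ]
    # Batch create: one call instead of one create() per row.
    records = self.env["acc.enedis.cdc"].create(curves_to_create)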