diff --git a/__manifest__.py b/__manifest__.py
index 98efaefb451812d0f4d91ed57f4d23b1cd3adc08..162e8338288472104a9511e30c61ee7755df74f4 100644
--- a/__manifest__.py
+++ b/__manifest__.py
@@ -7,19 +7,13 @@
     "license": "AGPL-3",
     "depends": ["oacc"],
     "data": [
+        "security/ir.model.access.csv",
         # datas
         # views
         # views menu
         # wizard
         "wizard/acc_operation_import_wizard_views.xml",
     ],
-    "assets": {
-        "web._assets_primary_variables": [],
-        "web._assets_frontend_helpers": [],
-        "web.assets_frontend": [],
-        "web.assets_tests": [],
-        "web.assets_qweb": [],
-    },
     "installable": True,
     "auto_install": False,
 }
diff --git a/models/acc_operation.py b/models/acc_operation.py
index cd9a948880f20c54556caf6b33c0e5c903074478..50b6dae001f76a52b57920f5fdb167ab0248f6d7 100644
--- a/models/acc_operation.py
+++ b/models/acc_operation.py
@@ -2,6 +2,7 @@
 # License AGPL-3.0 or later (http://www.gnu.org/licenses/agpl.html)
 from datetime import datetime
 from os.path import splitext
+
 from dateutil.relativedelta import relativedelta
 
 from odoo import _, models
@@ -40,119 +41,152 @@ class AccOperation(models.Model):
     # ------------------------------------------------------
     # Actions
     # ------------------------------------------------------
-    def import_check_docs(self, files):
+
+    # ------------------------------------------------------
+    # Business methods
+    # ------------------------------------------------------
+    def _import_check_docs(self, model):
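+        """Check the CSV files attached to this operation before import.
+
+        Each attachment must be a CSV named
+        ``<PRM>_<DDMMYYYY>_<DDMMYYYY>_<type>_CDC``. Validation errors are
+        collected for every file and raised together; otherwise the
+        confirmation wizard is opened with the list of files to import.
+        """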
+        raise_error = False
+        error_msg = ""
         mess = ""
 
+        if not model:
+            raise ValidationError(_("Le type de courbes à importer n'est pas défini"))
+        self.ensure_one()
+        files = self.env["ir.attachment"].search(
+            [("res_model", "=", "acc.operation"), ("res_id", "=", self.id)]
+        )
         if not files:
-            raise ValidationError(
+            raise UserError(
                 _(
                     "Aucun fichier à intégrer. Pour joindre des fichiers, "
                     "veuillez cliquer sur l’icône trombone se trouvant dans "
-                    "le bloc Chatter à droite"
+                    "le bloc Chatter à droite (ou en-dessous en fonction de "
+                    "la résolution de votre écran)."
                 )
             )
-        else:
-            files.mapped("name")
-            for file in files:
-                file_suffix = splitext(file.name)[1]
-                file_name = splitext(file.name)[0]
-
-                if file_suffix != ".csv":
-                    raise ValidationError(
-                        _(
-                            "Le fichier %s ne semble pas être au bon format. "
-                            "Format CSV attendu."
-                        )
-                        % file.name
-                    )
 
-                data_filename = file_name.split("_")
+        for file in files:
+            file_suffix = splitext(file.name)[1]
+            file_name = splitext(file.name)[0]
 
-                if len(data_filename) != 5:
-                    raise ValidationError(
-                        _(
-                            "Le fichier %s ne semble pas correspondre "
-                            "au fichier attendu. Ex de nom de fichier correct: "
-                            "'23555861012080_09022023_08032023_Conso_CDC'."
-                        )
-                        % file.name
+            if file_suffix != ".csv":
+                raise_error = True
+                error_msg += (
+                    _(
+                        "Le fichier %s ne semble pas être au bon format. "
+                        "Format CSV attendu.\n"
                     )
+                    % file.name
+                )
 
-                date_begin_str = data_filename[1]
-                date_end_str = data_filename[2]
-
-                # Contrôles sur le type de données CDC
-                if data_filename[3] not in ["Prod", "Conso", "Autoconso", "Surplus"]:
-                    raise UserError(
-                        _(
-                            "Le fichier %s ne correspond à aucun type "
-                            "de courbes de charge. il doit contenir Prod, Conso, "
-                            "Autoconso ou Surplus."
-                        )
-                        % file.name
-                    )
+            data_filename = file_name.split("_")
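+            # Expected parts: PRM id, start date, end date, data type, "CDC"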
 
-                computed_data_type = data_filename[3].lower()
-
-                # Contrôle sur le type de données
-                file_suffix = data_filename[4]
-                if file_suffix != "CDC":
-                    raise UserError(
-                        _(
-                            "Le fichier %s n'a pas le bon type de format, "
-                            "il doit se terminer par 'CDC'."
-                        )
-                        % file.name
+            if len(data_filename) != 5:
+                raise_error = True
+                error_msg += (
+                    _(
+                        "Le fichier %s ne semble pas correspondre "
+                        "au fichier attendu. Ex de nom de fichier correct: "
+                        "'23555861012080_09022023_08032023_Conso_CDC'.\n"
                     )
-
-                # Contrôle PRM
-                id_prm = data_filename[0]
-                # Vérification existance PRM
-                counter_id = self.env["acc.counter"].search([("name", "=", id_prm)])
-                if not counter_id:
-                    raise ValidationError(
-                        _("Fichier %(file)s : L'ID PRM %(prm)s ne semble pas exister.")
-                        % {"file": file.name, "prm": id_prm}
+                    % file.name
+                )
+                continue
+
+            date_begin_str = data_filename[1]
+            date_end_str = data_filename[2]
+
+            # Contrôles sur le type de données CDC
+            if data_filename[3] not in ["Prod", "Conso", "Autoconso", "Surplus"]:
+                raise_error = True
+                error_msg += (
+                    _(
+                        "Le fichier %s ne correspond à aucun type "
+                        "de courbes de charge. il doit contenir Prod, Conso, "
+                        "Autoconso ou Surplus."
                     )
+                    % file.name
+                )
 
-                date_format = "%d%m%Y"
-                date_begin_obj = datetime.strptime(date_begin_str, date_format)
-                date_end_obj = datetime.strptime(
-                    date_end_str, date_format
-                ) + relativedelta(days=1)
-
-                # Vérification si des données existent déjà
-                # pour cet intervalle de date
-                start_datetime = local_to_utc(date_begin_obj, "Europe/Paris")
-                end_datetime = local_to_utc(date_end_obj, "Europe/Paris")
-
-                domain_all = [
-                    ("date_slot", ">=", start_datetime),
-                    ("date_slot", "<", end_datetime),
-                    ("acc_operation_id", "=", self.id),
-                    ("comp_data_type", "=", computed_data_type),
-                    ("acc_counter_id", "=", counter_id.id),
-                ]
-                rec_ids = self.env["acc.enedis.cdc"].search(domain_all)
-                if rec_ids:
-                    mess += (
-                        "<p>Fichier "
-                        + file.name
-                        + ": des données existent déjà pour la période du "
-                        + str(date_begin_obj)
-                        + " au "
-                        + str(date_end_obj)
-                        + ". Etes-vous sûr de vouloir écraser "
-                        "les données existantes?</p>"
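+            # Map the filename label to the comp_data_type selection value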
+            computed_data_type = data_filename[3].lower()
+            if computed_data_type == "conso":
+                computed_data_type = "cons"
+            elif computed_data_type == "autoconso":
+                computed_data_type = "autocons"
+
+            # Contrôle sur le type de données
+            file_suffix = data_filename[4]
+            if file_suffix != "CDC":
+                raise_error = True
+                error_msg += (
+                    _(
+                        "Le fichier %s n'a pas le bon type de format, "
+                        "il doit se terminer par 'CDC'.\n"
                     )
-                else:
-                    mess += (
-                        "<p>Fichier "
-                        + file.name
-                        + ": <strong>Prêt à importer</strong></p>"
-                    )
-            return mess
+                    % file.name
+                )
 
-    # ------------------------------------------------------
-    # Business methods
-    # ------------------------------------------------------
+            # Contrôle PRM
+            id_prm = data_filename[0]
+            # Vérification existence PRM
+            counter_id = self.env["acc.counter"].search([("name", "=", id_prm)])
+            if not counter_id:
+                raise_error = True
+                error_msg += _(
+                    "Fichier %(file)s : L'ID PRM %(prm)s ne semble pas exister.\n"
+                ) % {"file": file.name, "prm": id_prm}
+
+            date_format = "%d%m%Y"
+            date_begin_obj = datetime.strptime(date_begin_str, date_format)
+            date_end_obj = datetime.strptime(date_end_str, date_format) + relativedelta(
+                days=1
+            )
+
+            # Vérification si des données existent déjà
+            # pour cet intervalle de date
+            start_datetime = local_to_utc(date_begin_obj, "Europe/Paris")
+            end_datetime = local_to_utc(date_end_obj, "Europe/Paris")
+
+            domain_all = [
+                ("date_slot", ">=", start_datetime),
+                ("date_slot", "<", end_datetime),
+                ("acc_operation_id", "=", self.id),
+                ("comp_data_type", "=", computed_data_type),
+                ("acc_counter_id", "=", counter_id.id),
+            ]
+            rec_ids = self.env[model].search(domain_all)
+            if rec_ids:
+                mess += (
+                    "<p>Fichier "
+                    + file.name
+                    + ": des données existent déjà pour la période du "
+                    + str(date_begin_obj)
+                    + " au "
+                    + str(date_end_obj)
+                    + ". Etes-vous sûr de vouloir écraser "
+                    "les données existantes ?</p>"
+                )
+            else:
+                mess += (
+                    "<p>Fichier " + file.name + ": <strong>Prêt à importer</strong></p>"
+                )
+        if raise_error:
+            raise ValidationError(error_msg)
+        if mess:
+            wizard = self.env["acc.operation.import.wizard"].create(
+                {
+                    "operation_id": self.id,
+                    "attachment_ids": files,
+                    "model": model,
+                    "message": mess,
+                }
+            )
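+            # Open the wizard in a dialog so the user can confirm the import
+            # (and the overwrite of any existing data) before it runs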
+            return {
+                "name": "Confirmation d'import",
+                "type": "ir.actions.act_window",
+                "view_mode": "form",
+                "res_model": "acc.operation.import.wizard",
+                "res_id": wizard.id,
+                "target": "new",
+            }
diff --git a/wizard/acc_operation_import_wizard.py b/wizard/acc_operation_import_wizard.py
index b1071673b95cae1eb783adcff237e3246f7db10a..dfa2e28db2e2270c8a2e86a5f488b27e1165d55d 100644
--- a/wizard/acc_operation_import_wizard.py
+++ b/wizard/acc_operation_import_wizard.py
@@ -7,7 +7,7 @@ from datetime import datetime, timedelta
 
 from dateutil.relativedelta import relativedelta
 
-from odoo import api, fields, models
+from odoo import fields, models
 
 from odoo.addons.api_connector.tools.date_utils import local_to_utc
 
@@ -21,8 +21,9 @@ class AccOperationImportWizard(models.TransientModel):
     # ------------------------------------------------------
     operation_id = fields.Many2one("acc.operation", "Opération liée")
     message = fields.Text(
-        string="Message Logs",
+        string="Message Log",
     )
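+    # Target curve model; the empty selection is expected to be extended
+    # (e.g. via selection_add) by the modules implementing each curve type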
+    model = fields.Selection(selection=[], required=True)
     attachment_ids = fields.Many2many(
         comodel_name="ir.attachment", string="Documents à importer"
     )
@@ -34,12 +35,6 @@ class AccOperationImportWizard(models.TransientModel):
     # ------------------------------------------------------
     # Default methods
     # ------------------------------------------------------
-    @api.model
-    def default_get(self, fields_list):
-        # OVERRIDE
-        res = super().default_get(fields_list)
-        self._context.get("active_ids")
-        return res
 
     # ------------------------------------------------------
     # Computed fields / Search Fields
@@ -60,27 +55,77 @@ class AccOperationImportWizard(models.TransientModel):
     # ------------------------------------------------------
     # Business methods
     # ------------------------------------------------------
-    def delete_existing_data(self, data_filename, counter_id, message):
-        """
-        deleting existing data depends of wich curves type you import
-        implemented in each modules
-        """
-        message += "Delete data not implemented"
+    def delete_existing_data(self, data_filename, counter_id):
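+        """Delete existing curve records covering the imported period.
+
+        Returns an HTML snippet describing the deletion, to be appended to
+        the import log message.
+        """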
+        message = ""
+        # Suppression des données si déjà existantes
+        date_begin_str = data_filename[1]
+        date_end_str = data_filename[2]
+        date_format = "%d%m%Y"
+        date_begin_obj = datetime.strptime(date_begin_str, date_format)
+        date_end_obj = datetime.strptime(date_end_str, date_format) + relativedelta(
+            days=1
+        )
+
+        # Vérification si des données existent déjà
+        # pour cet intervalle de date
+        start_datetime = local_to_utc(date_begin_obj, "Europe/Paris")
+        end_datetime = local_to_utc(date_end_obj, "Europe/Paris")
+
+        computed_data_type = data_filename[3].lower()
+        if computed_data_type == "conso":
+            computed_data_type = "cons"
+        elif computed_data_type == "autoconso":
+            computed_data_type = "autocons"
+
+        domain_all = [
+            ("date_slot", ">=", start_datetime),
+            ("date_slot", "<", end_datetime),
+            ("acc_operation_id", "=", self.operation_id.id),
+            ("comp_data_type", "=", computed_data_type),
+            ("acc_counter_id", "=", counter_id.id),
+        ]
+        rec_ids = self.env[self.model].search(domain_all)
+        if rec_ids:
+            message += "Suppression des enregistrements existants ... <br/>"
+            rec_ids.unlink()
+            message += "Suppression des enregistrements existants OK <br/>"
+
         return message
 
     def create_curve(self, curve_data):
-        """
-        creating curve depends of wich curves type you import
-        implemented in each modules
-        """
-        pass
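+        """Create a curve record on the model selected in the wizard."""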
+        self.env[self.model].create(curve_data)
 
     def update_partner_id(self, data_filename, counter_id):
-        pass
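+        """Update partner_id on the imported curve records, based on the
+        counter periods overlapping the imported interval."""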
+        date_begin_str = data_filename[1]
+        date_end_str = data_filename[2]
+        date_format = "%d%m%Y"
+        date_begin_obj = datetime.strptime(date_begin_str, date_format)
+        date_end_obj = datetime.strptime(date_end_str, date_format) + relativedelta(
+            days=1
+        )
+        # Update partner_id for retrieved cdc
+        domain = [
+            ("acc_operation_id", "=", self.operation_id.id),
+            ("acc_counter_id", "=", counter_id.id or False),
+            (
+                "prm_type",
+                "=",
+                "delivery" if data_filename[3] in ("autocons", "cons") else "injection",
+            ),
+        ]
+        self.env["acc.counter.period"]._get_periods_from_interval(
+            domain, date_begin_obj, date_end_obj
+        )._update_cdc_partner_id(model=self.model)
 
     def valid_import(self):
         message = ""
-        message += "<h1>Début Import manuelle: " + str(fields.Datetime.now()) + "</h1>"
+        message += (
+            "<h1>Début Import manuel: "
+            + str(fields.Datetime.now())
+            + " - "
+            + self.model
+            + "</h1>"
+        )
         for file in self.attachment_ids:
             message += (
                 "<p><strong>Fichier "
@@ -96,8 +141,8 @@ class AccOperationImportWizard(models.TransientModel):
             if data_filename[3] not in ["prod", "surplus"]:
                 data_filename[3] = data_filename[3][:-1]
 
-            message = self.delete_existing_data(
-                data_filename=data_filename, counter_id=counter_id, message=message
+            message += self.delete_existing_data(
+                data_filename=data_filename, counter_id=counter_id
             )
 
             file_decode = io.StringIO(base64.b64decode(file.datas).decode("UTF-8"))
@@ -144,17 +189,20 @@ class AccOperationImportWizard(models.TransientModel):
 
             self.update_partner_id(data_filename, counter_id)
 
-            message += "Fin de l'Import des données OK<br/>"
+            message += "Fin de l'import des données OK<br/>"
             # Suppression du fichier après création des enregistrements
-            message += "Suppression du fichiers " + file.name + " ...<br/>"
+            message += "Suppression du fichier " + file.name + " ...<br/>"
             file.unlink()
             message += "Suppression OK </p>"
 
-        message += "<h1>Fin Import manuelle: " + str(fields.Datetime.now()) + "</h1>"
+        message += "<h1>Fin import manuel: " + str(fields.Datetime.now()) + "</h1>"
         # Logs information logs
         log_id = self.env["acc.logs"].create(
             {
-                "name": "Import du " + str(fields.Date.today()) + " manuelle",
+                "name": "Import du "
+                + str(fields.Date.today())
+                + " manuel - "
+                + self.model,
                 "date_launched": fields.Datetime.now(),
                 "type_log": "manual",
                 "message": message,