diff --git a/__manifest__.py b/__manifest__.py
index 702987ebc26ea658685c1aa53c5be0a19ae96ca4..9ef2c2a7a254ec49fdd2f617c86bba9f0371642d 100644
--- a/__manifest__.py
+++ b/__manifest__.py
@@ -5,7 +5,7 @@
     "website": "https://le-filament.com",
     "version": "16.0.1.0.0",
     "license": "AGPL-3",
-    "depends": ["oacc"],
+    "depends": ["oacc", "oacc_raw_curves"],
     "data": [
         "security/ir.model.access.csv",
         # datas
@@ -13,7 +13,6 @@
         "views/acc_operation_views.xml",
         # views menu
         # wizard
-        "wizard/acc_operation_import_wizard_views.xml",
     ],
     "assets": {
         "web._assets_primary_variables": [],
diff --git a/models/acc_operation.py b/models/acc_operation.py
index 690db88814a01101a853c790af6cc2a0f91975d6..3f5194c418634b11c5f2192e80e8f1bf65c02546 100644
--- a/models/acc_operation.py
+++ b/models/acc_operation.py
@@ -40,122 +40,7 @@ class AccOperation(models.Model):
     # ------------------------------------------------------
     # Actions
     # ------------------------------------------------------
-    def import_check_docs(self, files):
-        mess = ""
-
-        if not files:
-            raise ValidationError(
-                _(
-                    "Aucun fichier à intégrer. Pour joindre des fichiers, "
-                    "veuillez cliquer sur l’icône trombone se trouvant dans "
-                    "le bloc Chatter à droite"
-                )
-            )
-        else:
-            files.mapped("name")
-            for file in files:
-                ext_file = file.name[-3:]
-                if ext_file != "csv":
-                    raise ValidationError(
-                        _(
-                            "Le fichier %s ne semble pas être au bon format. "
-                            "Format CSV attendu."
-                        )
-                        % file.name
-                    )
-
-                data_filename = file.name.split("_")
-
-                if len(data_filename) != 5:
-                    raise ValidationError(
-                        _(
-                            "Le fichier %s ne semble pas correspondre "
-                            "au fichier attendu. Ex de nom de fichier correct: "
-                            "'23555861012080_09022023_08032023_Conso_CDC'."
-                        )
-                        % file.name
-                    )
-
-                date_begin_str = data_filename[1]
-                date_end_str = data_filename[2]
-                computed_data_type = data_filename[3].lower()
-
-                # Contrôle sur le type de données
-                ext_file = data_filename[4][:3]
-                if ext_file != "CDC":
-                    raise UserError(
-                        _(
-                            "Le fichier %s n'a pas le bon type de format, "
-                            "il doit se terminer par 'CDC'."
-                        )
-                        % file.name
-                    )
-
-                # Contrôles sur le type de données CDC
-                if computed_data_type != "prod" and computed_data_type != "surplus":
-                    computed_data_type = data_filename[3].lower()[:-1]
-                    if (
-                        computed_data_type != "cons"
-                        and computed_data_type != "autocons"
-                    ):
-                        raise UserError(
-                            _(
-                                "Le fichier %s ne correspond à aucun type "
-                                "de courbes de charge. il doit contenir Prod, Conso, "
-                                "Autoconso ou Surplus."
-                            )
-                            % file.name
-                        )
-
-                # Contrôle PRM
-                id_prm = data_filename[0]
-                # Vérification existance PRM
-                counter_id = self.env["acc.counter"].search([("name", "=", id_prm)])
-                if not counter_id:
-                    raise ValidationError(
-                        _("Fichier %(file)s : L'ID PRM %(prm)s ne semble pas exister.")
-                        % {"file": file.name, "prm": id_prm}
-                    )
-
-                date_format = "%d%m%Y"
-                date_begin_obj = datetime.strptime(date_begin_str, date_format)
-                date_end_obj = datetime.strptime(
-                    date_end_str, date_format
-                ) + relativedelta(days=1)
-
-                # Vérification si des données existent déjà
-                # pour cet intervalle de date
-                start_datetime = local_to_utc(date_begin_obj, "Europe/Paris")
-                end_datetime = local_to_utc(date_end_obj, "Europe/Paris")
-
-                domain_all = [
-                    ("date_slot", ">=", start_datetime),
-                    ("date_slot", "<", end_datetime),
-                    ("acc_operation_id", "=", self.id),
-                    ("comp_data_type", "=", computed_data_type),
-                    ("acc_counter_id", "=", counter_id.id),
-                ]
-                rec_ids = self.env["acc.enedis.cdc"].search(domain_all)
-                if rec_ids:
-                    mess += (
-                        "<p>Fichier "
-                        + file.name
-                        + ": des données existent déjà pour la période du "
-                        + str(date_begin_obj)
-                        + " au "
-                        + str(date_end_obj)
-                        + ". Etes-vous sûr de vouloir écraser "
-                        "les données existantes?</p>"
-                    )
-                else:
-                    mess += (
-                        "<p>Fichier "
-                        + file.name
-                        + ": <strong>Prêt à importer</strong></p>"
-                    )
-            return mess
-
-    def import_cdc(self):
+    def import_raw_cdc(self):
         # vérification des documents à importer
         mess = ""
         for operation in self:
diff --git a/views/acc_operation_views.xml b/views/acc_operation_views.xml
index da46155dd3f00883b651930258cc30c5bdfa3691..530f8c37aff2b16cc7adb2063400e8834e7904f3 100644
--- a/views/acc_operation_views.xml
+++ b/views/acc_operation_views.xml
@@ -2,7 +2,6 @@
 <!-- Copyright 2021- Le Filament (https://le-filament.com)
      License AGPL-3.0 or later (https://www.gnu.org/licenses/agpl). -->
 <odoo>
-
     <record id="acc_operation_form_view" model="ir.ui.view">
         <field name="name">acc.acc_operation.form.import</field>
         <field name="model">acc.operation</field>
@@ -10,14 +9,13 @@
         <field name="arch" type="xml">
             <xpath expr="//header" position="inside">
                 <button
-                    string="Intégration des données manuelle"
+                    string="Intégration des données brutes"
                     type="object"
                     class="btn-primary"
-                    name="import_cdc"
+                    name="import_raw_cdc"
                     groups="oacc.group_operation_superadmin"
                 />
             </xpath>
         </field>
     </record>
-
 </odoo>
diff --git a/wizard/acc_operation_import_wizard.py b/wizard/acc_operation_import_wizard.py
index 8a06972f1cfec18337ea7af5179a9ce928b98f79..b47a369151408441b72ae79ec11bfcbf14544175 100644
--- a/wizard/acc_operation_import_wizard.py
+++ b/wizard/acc_operation_import_wizard.py
@@ -1,8 +1,5 @@
 # Copyright 2021- Le Filament (https://le-filament.com)
 # License AGPL-3.0 or later (http://www.gnu.org/licenses/agpl.html)
-import base64
-import csv
-import io
 from datetime import datetime, timedelta
 
 from dateutil.relativedelta import relativedelta
@@ -11,21 +8,13 @@ from odoo import api, fields, models
 
 from odoo.addons.api_connector.tools.date_utils import local_to_utc
 
 
 class AccOperationImportWizard(models.TransientModel):
-    _name = "acc.operation.import.wizard"
-    _description = "Wizard: Import des données brutes"
+    _inherit = "acc.operation.import.wizard"
 
     # ------------------------------------------------------
     # Fields declaration
     # ------------------------------------------------------
-    operation_id = fields.Many2one("acc.operation", "Opération liée")
-    message = fields.Text(
-        string="Message Logs",
-    )
-    attachment_ids = fields.Many2many(
-        comodel_name="ir.attachment", string="Documents à importer"
-    )
 
     # ------------------------------------------------------
     # SQL Constraints
@@ -34,12 +23,6 @@
     # ------------------------------------------------------
     # Default methods
     # ------------------------------------------------------
-    @api.model
-    def default_get(self, fields_list):
-        # OVERRIDE
-        res = super().default_get(fields_list)
-        self._context.get("active_ids")
-        return res
 
     # ------------------------------------------------------
     # Computed fields / Search Fields
@@ -60,129 +43,36 @@
     # ------------------------------------------------------
     # Business methods
     # ------------------------------------------------------
-    def valid_import(self):
-        message = ""
-        message += "<h1>Début Import manuelle: " + str(fields.Datetime.now()) + "</h1>"
-        for file in self.attachment_ids:
-            message += (
-                "<p><strong>Fichier "
-                + file.name
-                + "</strong><br/>Début Import ... <br/>"
-            )
-            data_filename = file.name.split("_")
-            id_pdm = data_filename[0]
-
-            counter_id = self.env["acc.counter"].search([("name", "=", id_pdm)])
-
-            computed_data_type = data_filename[3].lower()
-            if computed_data_type != "prod" and computed_data_type != "surplus":
-                computed_data_type = data_filename[3].lower()[:-1]
-
-            # Suppression des données si déjà existantes
-            date_begin_str = data_filename[1]
-            date_end_str = data_filename[2]
-            date_format = "%d%m%Y"
-            date_begin_obj = datetime.strptime(date_begin_str, date_format)
-            date_end_obj = datetime.strptime(date_end_str, date_format) + relativedelta(
-                days=1
-            )
-
-            # Vérification si des données existent déjà
-            # pour cet intervalle de date
-            start_datetime = local_to_utc(date_begin_obj, "Europe/Paris")
-            end_datetime = local_to_utc(date_end_obj, "Europe/Paris")
-
-            domain_all = [
-                ("date_slot", ">=", start_datetime),
-                ("date_slot", "<", end_datetime),
-                ("acc_operation_id", "=", self.operation_id.id),
-                ("comp_data_type", "=", computed_data_type),
-                ("acc_counter_id", "=", counter_id.id),
-            ]
-            rec_ids = self.env["acc.enedis.cdc"].search(domain_all)
-            if rec_ids:
-                message += "Suppression des enregistrements existants ... <br/>"
-                rec_ids.unlink()
-                message += "Suppression des enregistrements existants OK <br/>"
-
-            file_decode = io.StringIO(base64.b64decode(file.datas).decode("UTF-8"))
-            file_decode.seek(0)
-
-            file_reader = []
-            csv_reader = csv.reader(file_decode, delimiter=";")
-            file_reader.extend(csv_reader)
-
-            # Create Data for the CDC
-            message += "Lecture et import des données ... <br/>"
-            for row in file_reader:
-                # Create 1st slot 0-30min
-                slot_datetime_tz = datetime.strptime(row[0], "%d/%m/%Y %H:%M")
-                slot_datetime_utc = local_to_utc(slot_datetime_tz, "Europe/Paris")
-                self.env["acc.enedis.cdc"].create(
-                    {
-                        "name": file.name,
-                        "acc_operation_id": self.operation_id.id,
-                        "acc_counter_id": counter_id.id,
-                        "comp_data_type": computed_data_type,
-                        "power": row[1],
-                        "date_slot": slot_datetime_utc,
-                    }
-                )
-                # Create 2nd slot 30-60min
-                self.env["acc.enedis.cdc"].create(
-                    {
-                        "name": file.name,
-                        "acc_operation_id": self.operation_id.id,
-                        "acc_counter_id": counter_id.id,
-                        "comp_data_type": computed_data_type,
-                        "power": row[2],
-                        "date_slot": slot_datetime_utc + timedelta(minutes=30),
-                    }
-                )
-
-            # Update partner_id for retrieved cdc
-            domain = [
-                ("acc_operation_id", "=", self.operation_id.id),
-                ("acc_counter_id", "=", counter_id.id or False),
-                (
-                    "prm_type",
-                    "=",
-                    "delivery"
-                    if computed_data_type in ("autocons", "cons")
-                    else "injection",
-                ),
-            ]
-            self.env["acc.counter.period"]._get_periods_from_interval(
-                domain, date_begin_obj, date_end_obj
-            )._update_cdc_partner_id()
-            message += "Fin de l'Import des données OK<br/>"
-            # Suppression du fichier après création des enregistrements
-            message += "Suppression du fichiers " + file.name + " ...<br/>"
-            file.unlink()
-            message += "Suppression OK </p>"
-
-        message += "<h1>Fin Import manuelle: " + str(fields.Datetime.now()) + "</h1>"
-        # Logs information logs
-        log_id = self.env["acc.logs"].create(
-            {
-                "name": "Import du " + str(fields.Date.today()) + " manuelle",
-                "date_launched": fields.Datetime.now(),
-                "type_log": "manual",
-                "message": message,
-                "acc_operation_id": self.operation_id.id,
-            }
+    def delete_existing_data(self, data_filename, counter_id, message):
+
+        # Suppression des données si déjà existantes
+        date_begin_str = data_filename[1]
+        date_end_str = data_filename[2]
+        date_format = "%d%m%Y"
+        date_begin_obj = datetime.strptime(date_begin_str, date_format)
+        date_end_obj = datetime.strptime(date_end_str, date_format) + relativedelta(
+            days=1
         )
 
-        view_id = self.env.ref("oacc.acc_logs_form").id
-        return {
-            "name": "LOGS",
-            "view_type": "form",
-            "view_mode": "form",
-            "views": [(view_id, "form")],
-            "res_model": "acc.logs",
-            "view_id": view_id,
-            "type": "ir.actions.act_window",
-            "res_id": log_id.id,
-            "target": "new",
-            "flags": {"initial_mode": "view"},
-        }
+        # Vérification si des données existent déjà
+        # pour cet intervalle de date
+        start_datetime = local_to_utc(date_begin_obj, "Europe/Paris")
+        end_datetime = local_to_utc(date_end_obj, "Europe/Paris")
+
+        domain_all = [
+            ("date_slot", ">=", start_datetime),
+            ("date_slot", "<", end_datetime),
+            ("acc_operation_id", "=", self.operation_id.id),
+            ("comp_data_type", "=", data_filename[3]),
+            ("acc_counter_id", "=", counter_id.id),
+        ]
+        rec_ids = self.env["acc.enedis.raw.cdc"].search(domain_all)
+        if rec_ids:
+            message += "Suppression des enregistrements existants ... <br/>"
+            rec_ids.unlink()
+            message += "Suppression des enregistrements existants OK <br/>"
+
+        return message
+
+    def create_curve(self, curve_data):
+        self.env["acc.enedis.raw.cdc"].create(curve_data)
diff --git a/wizard/acc_operation_import_wizard_views.xml b/wizard/acc_operation_import_wizard_views.xml
deleted file mode 100644
index 53a9b3c75792ea132718ee34401a34131df71543..0000000000000000000000000000000000000000
--- a/wizard/acc_operation_import_wizard_views.xml
+++ /dev/null
@@ -1,36 +0,0 @@
-<?xml version="1.0" encoding="utf-8" ?>
-<!-- Copyright 2021- Le Filament (https://le-filament.com)
-     License AGPL-3.0 or later (https://www.gnu.org/licenses/agpl). -->
-<odoo>
-
-    <record id="acc_operation_import_wizard_form" model="ir.ui.view">
-        <field name="name">acc.operation.import.wizard.form</field>
-        <field name="model">acc.operation.import.wizard</field>
-        <field name="arch" type="xml">
-            <form string="Import des fichiers">
-                <div>
-                    <field
-                        class="o_field_header"
-                        name="message"
-                        readonly="1"
-                        widget="html"
-                    />
-                </div>
-                <group>
-                    <field name="operation_id" invisible="1" />
-                    <field name="attachment_ids" invisible="1" />
-                </group>
-                <footer>
-                    <button
-                        name="valid_import"
-                        type="object"
-                        string="Valider l'import"
-                        class="oe_highlight"
-                    />
-                    <button special="cancel" string="Annuler" />
-                </footer>
-            </form>
-        </field>
-    </record>
-
-</odoo>