diff --git a/data/queue_data.xml b/data/queue_data.xml
index 6d03a6f8e64986835d2b420e6fa6df2508cb8b0c..ea59c303ce260be8f35e4b9c28a69598c56972cc 100644
--- a/data/queue_data.xml
+++ b/data/queue_data.xml
@@ -1,4 +1,4 @@
-<?xml version="1.0" encoding="utf-8"?>
+<?xml version="1.0" encoding="utf-8" ?>
 <odoo>
     <record id="channel_wts" model="queue.job.channel">
         <field name="name">wts_channel</field>
diff --git a/models/hall_flow_log.py b/models/hall_flow_log.py
index 755ac53b0f5bfe7a71c39c80d03d74f32658ad3a..edbe9fe06fdad7fe2c7d5f189c06417c9b35ca2f 100644
--- a/models/hall_flow_log.py
+++ b/models/hall_flow_log.py
@@ -3,14 +3,14 @@
 
 from dateutil.relativedelta import relativedelta
 
-from odoo import _, api, fields, models
-from odoo.exceptions import UserError
-from odoo.addons.api_connector.tools.date_utils import local_to_utc
+from odoo import api, fields, models
 
 
 class HallFlowLog(models.Model):
     _name = "hall.flow.log"
     _description = "Hall Flow Logs"
+    _rec_name = "date_log"
+    _order = "date_log desc"
 
     # ------------------------------------------------------
     # Fields declaration
@@ -39,110 +39,63 @@ class HallFlowLog(models.Model):
         """
         if not hall_ids:
             hall_ids = self.env["hall.hall"].search([])
-        log_ids = self
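+        # Default the period to the current day when no explicit range is given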
+        if not (start_date and end_date):
+            start_date = fields.Datetime.start_of(
+                fields.Datetime.now(), granularity="day"
+            )
+            end_date = fields.Datetime.end_of(fields.Datetime.now(), granularity="day")
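+        # Nothing to import if none of the selected halls has zones configured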
+        if not hall_ids.mapped("zone_ids"):
+            return
+
         for hall in hall_ids:
             if hall.zone_ids:
-                last_import = self._get_last_import(hall)
-                if start_date and last_import and start_date < last_import:
-                    raise UserError(_(
-                        "Un import existe déjà pour cette date %s"
-                    ) % (fields.Date.to_string(start_date)))
-                if not start_date:
-                    start_date = last_import + relativedelta(days=1)
-                if not end_date:
-                    end_date = fields.Date.today() - relativedelta(days=1)
-
-                log_id = self.create({
-                    "date_log": fields.Datetime.now(),
-                    "date_start": start_date,
-                    "date_end": end_date,
-                    "hall_id": hall.id
-                })
-
+                log_id = self.create(
+                    {
+                        "date_log": fields.Datetime.now(),
+                        "date_start": start_date,
+                        "date_end": end_date,
+                        "hall_id": hall.id,
+                    }
+                )
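+                # Queue one delayed job per zone and endpoint (visitor / passerby)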
                 for zone in hall.zone_ids:
                     # Retrieve visitor datas
-                    visit_desc = log_id.hall_id.name + " - " + zone.name + " - Visiteurs"
-                    zone.with_delay(description=visit_desc)._wts_create_visitor(
-                        start_date, end_date, log_id
+                    visit_desc = "%s - %s - Visiteurs" % (
+                        log_id.hall_id.name,
+                        zone.name,
+                    )
+                    zone.with_delay(description=visit_desc)._wts_create_entry(
+                        start_date, end_date, log_id, endpoint="visitor"
+                    )
+                    pass_desc = "%s - %s - Passants" % (log_id.hall_id.name, zone.name)
+                    zone.with_delay(description=pass_desc)._wts_create_entry(
+                        start_date, end_date, log_id, endpoint="passerby"
                     )
-                    # Retrieve passer_by datas
-                    delta_days = (end_date - start_date).days
-                    pass_desc = log_id.hall_id.name + " - " + zone.name + " - Passants"
-                    if delta_days > 0:
-                        start_passerby = start_date
-                        for day in range(0, delta_days):
-                            zone.with_delay(description=pass_desc, max_retries=10)._wts_create_passerby(
-                                start_passerby, end_date, log_id
-                            )
-                            start_passerby = start_passerby + relativedelta(days=1, hours=1)
-                    else:
-                        zone.with_delay(description=pass_desc, max_retries=10)._wts_create_passerby(
-                            start_date, end_date, log_id
-                        )
-                    # Retrieve In/Out datas
-                    for sensor in zone.sensor_ids.filtered(lambda s: s.sensor_type == "sensor"):
-                        description = log_id.hall_id.name + " - " + sensor.name
-                        sensor.with_delay(description=description, max_retries=10)._wts_create_inout(
-                            start_date, end_date, log_id
-                        )
-
-                log_ids += log_id
-        return log_ids
 
     # ------------------------------------------------------
     # CRON functions
     # ------------------------------------------------------
-    def cron_wts(self, days=None, interval=None):
+    def cron_wts(self, days=1, interval=None):
         """
         WTS datas retrieve CRON
 
         :param integer days: days before today if specified
         :param integer interval: number of days to retrieve
         """
-        now = fields.Datetime.now()
-        day_end = float(
-            self.env["ir.config_parameter"].sudo().get_param("hall.day_end")
-        )
-        hour_end = int(day_end)
-        minute_end = int((day_end - hour_end) * 60)
-        day_start = float(
-            self.env["ir.config_parameter"].sudo().get_param("hall.day_start")
-        )
-        hour_start = int(day_start)
-        minute_start = int((day_start - hour_start) * 60)
-
-        timezone = self._context.get("tz") if self._context.get("tz") else "Europe/Paris"
-        end_date = local_to_utc(
-            now.replace(hour=hour_end + 1, minute=minute_end, second=0), timezone
-        )
-
-        if now < end_date:
-            end_date = end_date - relativedelta(days=1)
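+        # By default, import data for the whole previous day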
+        yesterday = fields.Datetime.now() - relativedelta(days=1)
+        start_date = fields.Datetime.start_of(yesterday, granularity="day")
+        end_date = fields.Datetime.end_of(yesterday, granularity="day")
 
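+        # Shift the whole window back by the requested number of days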
         if days:
+            start_date = start_date - relativedelta(days=days)
             end_date = end_date - relativedelta(days=days)
 
-        start_date = (end_date - relativedelta(days=1)).replace(
-            hour=hour_start, minute=minute_start
-        )
-        start_date = local_to_utc(start_date, timezone)
-
         if interval and interval > 1:
             start_date = start_date - relativedelta(days=interval)
             end_date = end_date - relativedelta(days=interval)
-            for day in range(0, interval):
-                self.import_wts_data(start_date=start_date, end_date=end_date)
-                start_date = start_date + relativedelta(days=1)
-                end_date = end_date + relativedelta(days=1)
-        else:
-            self.import_wts_data(start_date=start_date, end_date=end_date)
+
+        self.import_wts_data(start_date=start_date, end_date=end_date)
 
     # ------------------------------------------------------
     # Common functions
     # ------------------------------------------------------
-    def _get_last_import(self, hall_id):
-        last_log_id = self.search(
-            [("hall_id", "=", hall_id.id)], order="date_end desc", limit=1
-        )
-        return last_log_id.date_end if last_log_id else False
-
diff --git a/models/hall_flow_sensor.py b/models/hall_flow_sensor.py
index 6a86e5ac004d49e676adc63270dd271ab226d779..7880058342004ab3a5d37f4f21d61609fd802f6e 100644
--- a/models/hall_flow_sensor.py
+++ b/models/hall_flow_sensor.py
@@ -4,6 +4,7 @@
 from datetime import datetime
 
 from odoo import fields, models
+
 from odoo.addons.api_connector.tools.date_utils import local_to_utc
 
 
@@ -72,7 +73,7 @@ class HallFlowSensor(models.Model):
                             {
                                 "date": local_to_utc(
                                     datetime.strptime(date, "%Y-%m-%d %H:%M:%S"),
-                                    sensor.zone_id.timezone
+                                    sensor.zone_id.timezone,
                                 ),
                                 "flow_type": key,
                                 "count": data,
diff --git a/models/hall_flow_zone.py b/models/hall_flow_zone.py
index d39faf939a08e8364840edf48ee69907681a6900..689450f68bbfa150a940a3e592eaabfb34d92002 100644
--- a/models/hall_flow_zone.py
+++ b/models/hall_flow_zone.py
@@ -1,10 +1,11 @@
 # Copyright 2023 Le Filament (https://le-filament.com)
 # License AGPL-3.0 or later (http://www.gnu.org/licenses/agpl.html)
 
-from datetime import datetime
+from psycopg2.errors import UniqueViolation
+
+from odoo import _, fields, models
+from odoo.exceptions import UserError, ValidationError
 
-from odoo import fields, models
-from odoo.addons.api_connector.tools.date_utils import local_to_utc
 from odoo.addons.queue_job.exception import FailedJobError
 
 
@@ -46,10 +47,7 @@ class HallFlowZone(models.Model):
         inverse_name="zone_id",
         string="Capteurs",
     )
-    timezone = fields.Char(
-        string='Timezone',
-        default=lambda self: self._context.get('tz')
-    )
+    timezone = fields.Char(default=lambda self: self._context.get("tz"))
 
     # ------------------------------------------------------
     # SQL Constrains
@@ -71,97 +69,93 @@ class HallFlowZone(models.Model):
             if zone_data and zone_data.get("children", False):
                 for child in zone_data.get("children"):
                     child_id = self.with_context(active_test=False).search(
-                        [("wts_id", "=", child)])
+                        [("wts_id", "=", child)]
+                    )
                     if child_id:
                         child_id.parent_id = zone
             # Create/update sensors
             if zone_data and zone_data.get("sensors", False):
                 for sensor in zone_data.get("sensors"):
                     sensor_id = self.sensor_ids.with_context(active_test=False).search(
-                        [("wts_id", "=", sensor)])
+                        [("wts_id", "=", sensor)]
+                    )
                     if not sensor_id:
-                        zone.update({
-                            "sensor_ids": [(0, 0, {
-                                "wts_id": sensor,
-                                "sensor_type": "sensor"
-                            })]
-                        })
+                        zone.update(
+                            {
+                                "sensor_ids": [
+                                    (0, 0, {"wts_id": sensor, "sensor_type": "sensor"})
+                                ]
+                            }
+                        )
 
-    def _wts_create_visitor(
-        self, start_date, end_date, log_id, opening_time=True
+    def _wts_create_entry(
+        self,
+        start_date,
+        end_date,
+        log_id,
+        endpoint,
+        opening_time=True,
     ):
         """
-        Create visitors flows depending on zone
+        Create visitor or passerby flows for each zone over the given period
 
         :param datetime start_date: Start date of period
         :param datetime end_date: End date of period
         :param object log_id: Related log
-        :param bool opening_time: Defines if opening times need to be consider or not
+        :param str endpoint: WTS endpoint to query ("visitor" or "passerby")
+        :param bool opening_time: Defines if opening times need to be considered or not
+        :return: hall.flow records created
         """
         flow_ids = self.env["hall.flow"]
         backend_id = log_id.hall_id.backend_id
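+        # Only the visitor and passerby endpoints are supported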
+        if endpoint not in ["visitor", "passerby"]:
+            raise ValidationError(_("Wrong endpoint value"))
+
         for zone in self:
             try:
-                visitor_datas = backend_id._get_wifi_datas(
-                    url="/visitor/counthourdetails",
+                wts_datas = backend_id._get_wifi_datas(
+                    url="/%s/countdetails" % (endpoint),
                     zone_ids=zone.mapped("wts_id"),
                     start_date=start_date,
                     end_date=end_date,
                     opening_time=opening_time,
                 )
-                if visitor_datas:
-                    for date, data in visitor_datas.items():
+                if wts_datas:
+                    datas = wts_datas.get("all")
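+                    # Create one hall.flow record per returned date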
+                    for date, value in datas.items():
+                        log = "Import OK: %s - %s - %s - %s\n" % (
+                            date,
+                            log_id.hall_id.name,
+                            zone.name,
+                            endpoint,
+                        )
                         flow_ids += flow_ids.sudo().create(
                             {
-                                "date": local_to_utc(
-                                    datetime.strptime(date, "%Y-%m-%d %H:%M:%S"), zone.timezone
-                                ),
-                                "flow_type": "visitor",
-                                "count": data,
+                                "date": fields.Date.from_string(date),
+                                "flow_type": endpoint,
+                                "count": value,
                                 "log_id": log_id.id,
                                 "hall_id": log_id.hall_id.id,
                                 "zone_id": zone.id,
                             }
                         )
+
+                        log_id.log = (log_id.log or "") + log
+            except (UniqueViolation, UserError):
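+                # Duplicate entries are recorded in the import log instead of failing the job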
+                log = "Already exists: %s - %s - %s - %s\n" % (
+                    date,
+                    log_id.hall_id.name,
+                    zone.name,
+                    endpoint,
+                )
+                log_id.log = (log_id.log or "") + log
             except Exception as e:
                 raise FailedJobError(e.__str__())
 
         return flow_ids
-
-    def _wts_create_passerby(
-        self, start_date, end_date, log_id, opening_time=True
-    ):
-        """
-        Create passer by flows depending on zone
-
-        :param datetime start_date: Start date of period
-        :param datetime end_date: End date of period
-        :param object log_id: Related log
-        :param bool opening_time: Defines if opening times need to be consider or not
-        """
-        backend_id = log_id.hall_id.backend_id
-        flow_ids = self.env["hall.flow"]
-        for zone in self:
-            passerby_datas = backend_id._get_wifi_datas(
-                url="/passerby/counthour",
-                zone_ids=zone.mapped("wts_id"),
-                start_date=start_date,
-                end_date=end_date,
-                opening_time=opening_time,
-            )
-            if passerby_datas:
-                for date, data in passerby_datas.items():
-                    date_passerby = datetime(
-                        start_date.year, start_date.month, start_date.day, int(date), 0, 0
-                    )
-                    flow_ids += flow_ids.create(
-                        {
-                            "date": local_to_utc(date_passerby, zone.timezone),
-                            "flow_type": "passerby",
-                            "count": data,
-                            "log_id": log_id.id,
-                            "hall_id": log_id.hall_id.id,
-                            "zone_id": zone.id,
-                        }
-                    )
-        return flow_ids
diff --git a/views/hall_flow_log.xml b/views/hall_flow_log.xml
index 3008f3f8e111d385049c93137596b751bddba3ec..026d5c431cdfc74939e2638c9503c5686998bccd 100644
--- a/views/hall_flow_log.xml
+++ b/views/hall_flow_log.xml
@@ -8,7 +8,7 @@
         <field name="name">hall.flow.log.tree</field>
         <field name="model">hall.flow.log</field>
         <field name="arch" type="xml">
-            <tree create="0" editable="top">
+            <tree create="0">
                 <field name="date_log" />
                 <field name="hall_id" />
                 <field name="create_uid" />
@@ -17,6 +17,32 @@
             </tree>
         </field>
     </record>
+    <!-- Form View -->
+    <record id="hall_flow_log_form" model="ir.ui.view">
+        <field name="name">hall.flow.log.form</field>
+        <field name="model">hall.flow.log</field>
+        <field name="arch" type="xml">
+            <form>
+                <sheet>
+                    <group>
+                        <group>
+                            <field name="date_log" readonly="1" />
+                            <field name="hall_id" readonly="1" />
+                            <field name="create_uid" readonly="1" />
+                        </group>
+                        <group>
+                            <field name="date_start" readonly="1" />
+                            <field name="date_end" readonly="1" />
+                        </group>
+                    </group>
+                    <separator string="Log info" />
+                    <field name="log" readonly="1" />
+                </sheet>
+            </form>
+        </field>
+    </record>
 
     <!-- Search View -->
     <record id="hall_flow_log_search" model="ir.ui.view">
@@ -36,6 +62,6 @@
         <field name="name">Log flux</field>
         <field name="type">ir.actions.act_window</field>
         <field name="res_model">hall.flow.log</field>
-        <field name="view_mode">tree</field>
+        <field name="view_mode">tree,form</field>
     </record>
 </odoo>
diff --git a/wizard/hall_flow_import_wizard.py b/wizard/hall_flow_import_wizard.py
index c5e546434d5cb5f406ce8ae9e10b03a8a3ac57bf..8f365c22ec6628766b58a1261327a7d98881cc21 100644
--- a/wizard/hall_flow_import_wizard.py
+++ b/wizard/hall_flow_import_wizard.py
@@ -1,8 +1,7 @@
 # Copyright 2023 Le Filament (https://le-filament.com)
 # License AGPL-3.0 or later (http://www.gnu.org/licenses/agpl.html)
 
-from odoo import _, fields, models
-from odoo.exceptions import UserError
+from odoo import fields, models
 
 
 class HallFlowImportWizard(models.TransientModel):
@@ -12,14 +11,6 @@ class HallFlowImportWizard(models.TransientModel):
     # ------------------------------------------------------
     # Fields declaration
     # ------------------------------------------------------
-    import_type = fields.Selection(
-        selection=[
-            ("last_import", "Depuis le dernier import"),
-            ("choose_date", "Choisir les dates"),
-        ],
-        string="Import",
-        required=True,
-    )
     hall_id = fields.Many2one(comodel_name="hall.hall", string="Site", required=True)
     date_start = fields.Date("Date de début")
     date_end = fields.Date("Date de fin")
@@ -28,27 +19,13 @@ class HallFlowImportWizard(models.TransientModel):
     # Action functions
     # ------------------------------------------------------
     def action_import_flow(self):
-        Log = self.env["hall.flow.log"].sudo()
-
         # Détermination des dates de début et de fin de requête
-        if self.import_type == "choose_date":
-            start_date = fields.Datetime.start_of(
-                fields.Datetime.to_datetime(self.date_start), granularity="day"
-            )
-            end_date = fields.Datetime.end_of(
-                fields.Datetime.to_datetime(self.date_end), granularity="day"
-            )
-        else:
-            last_log = Log.search([], order="date_start desc", limit=1)
-            if last_log:
-                start_date = fields.Datetime.start_of(
-                    fields.Datetime.add(last_log.end_date, days=1), granularity="day"
-                )
-            else:
-                raise UserError(_("Aucun import n'a été effectué"))
-            yesterday = fields.Datetime.subtract(fields.Datetime.now(), days=1)
-            end_date = fields.Datetime.end_of(yesterday, granularity="day")
-
-        log_ids = Log.import_wts_data(
+        start_date = fields.Datetime.start_of(
+            fields.Datetime.to_datetime(self.date_start), granularity="day"
+        )
+        end_date = fields.Datetime.end_of(
+            fields.Datetime.to_datetime(self.date_end), granularity="day"
+        )
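+        # Launch the WTS import for the selected hall over the chosen period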
+        self.env["hall.flow.log"].sudo().import_wts_data(
             hall_ids=self.hall_id, start_date=start_date, end_date=end_date
         )
diff --git a/wizard/hall_flow_import_wizard.xml b/wizard/hall_flow_import_wizard.xml
index cb4acdae2f00259fd0a171bc5bdf1880353eb0c4..de2bf6fcf8074c8636b3a83a2126c39b23f15341 100644
--- a/wizard/hall_flow_import_wizard.xml
+++ b/wizard/hall_flow_import_wizard.xml
@@ -11,23 +11,14 @@
                     </div>
                     <group>
                         <group>
-                            <field name="import_type" widget="radio" />
                             <field
                                 name="hall_id"
                                 options="{'no_create': True, 'no_open': True}"
                             />
                         </group>
-                        <group
-                            attrs="{'invisible': [('import_type', '!=', 'choose_date')]}"
-                        >
-                            <field
-                                name="date_start"
-                                attrs="{'required': [('import_type', '=', 'choose_date')]}"
-                            />
-                            <field
-                                name="date_end"
-                                attrs="{'required': [('import_type', '=', 'choose_date')]}"
-                            />
+                        <group>
+                            <field name="date_start" required="1" />
+                            <field name="date_end" required="1" />
                         </group>
                     </group>
                     <footer>