import json
import logging
from datetime import datetime, timedelta
from flask import request, jsonify
from dateutil import parser, tz

from routes import app

logger = logging.getLogger(__name__)

def get_next_working_time(dt, work_start_hour, work_end_hour):
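    """Return the first moment at or after dt that falls inside working hours
    (work_start_hour to work_end_hour) on a weekday, rolling forward past
    weekends and after-hours times as needed."""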
    while True:
        if dt.weekday() >= 5:
            # Weekend, move to next working day
            dt += timedelta(days=1)
            dt = dt.replace(hour=work_start_hour, minute=0, second=0, microsecond=0)
        else:
            work_start = dt.replace(hour=work_start_hour, minute=0, second=0, microsecond=0)
            work_end = dt.replace(hour=work_end_hour, minute=0, second=0, microsecond=0)
            if dt < work_start:
                return work_start
            elif dt >= work_end:
                # After working hours, move to next day
                dt += timedelta(days=1)
                dt = dt.replace(hour=work_start_hour, minute=0, second=0, microsecond=0)
            else:
                # During working hours
                return dt

def compute_working_seconds(start_dt, end_dt, work_start_hour, work_end_hour):
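    """Count the seconds between start_dt and end_dt that fall inside working
    hours on weekdays, skipping weekends and time outside the working window."""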
    if start_dt >= end_dt:
        return 0

    total_seconds = 0
    current_dt = start_dt

    while current_dt < end_dt:
        if current_dt.weekday() >= 5:
            # Weekend, move to next working day
            current_dt += timedelta(days=1)
            current_dt = current_dt.replace(hour=work_start_hour, minute=0, second=0, microsecond=0)
            continue

        work_start = current_dt.replace(hour=work_start_hour, minute=0, second=0, microsecond=0)
        work_end = current_dt.replace(hour=work_end_hour, minute=0, second=0, microsecond=0)

        if current_dt < work_start:
            current_dt = work_start

        if current_dt >= work_end:
            # After working hours: move to the start of the next working day
            current_dt += timedelta(days=1)
            current_dt = current_dt.replace(hour=work_start_hour, minute=0, second=0, microsecond=0)
            continue

        period_end = min(work_end, end_dt)
        total_seconds += (period_end - current_dt).total_seconds()
        current_dt = period_end

    return total_seconds

def get_base_subject(subject):
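    """Strip any leading "RE: " prefixes so replies map back to the thread's
    original subject line."""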
    while subject.startswith("RE: "):
        subject = subject[4:]
    return subject

@app.route('/mailtime', methods=['POST'])
def mail_time():
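    """Compute each user's average response time in working seconds.

    Expects a JSON body with "users" (each with a name and officeHours giving
    timeZone, start and end hours) and "emails" (each with subject, sender and
    timeSent). Replies are grouped into threads by base subject; for every
    reply, the clock starts when the previous email in the thread was sent
    (shifted forward into the responder's next working window) and stops when
    the reply is sent, counting only the responder's working hours. Returns a
    mapping of user name to rounded average response time in seconds."""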
    data = request.get_json()
    logging.info("Data received for evaluation: {}".format(data))

    users = {}
    for user in data.get("users", []):
        name = user["name"]
        office_hours = user["officeHours"]
        users[name] = {
            "name": name,
            "timezone": office_hours["timeZone"],
            "start_hour": office_hours["start"],
            "end_hour": office_hours["end"],
            "response_times": []
        }

    # Build threads based on base subjects
    threads = {}
    for email in data.get("emails", []):
        subject = email["subject"]
        base_subject = get_base_subject(subject)
        if base_subject not in threads:
            threads[base_subject] = []
        email['parsed_time'] = parser.isoparse(email['timeSent'])
        threads[base_subject].append(email)

    # Process emails in threads
    for thread_emails in threads.values():
        # Sort emails by parsed_time
        thread_emails.sort(key=lambda e: e['parsed_time'])
        for i in range(1, len(thread_emails)):
            current_email = thread_emails[i]
            previous_email = thread_emails[i - 1]

            sender = current_email['sender']

            sender_info = users[sender]
            sender_tz = tz.gettz(sender_info['timezone'])

            # Time when sender received the previous email, in sender's timezone
            previous_email_time = previous_email['parsed_time'].astimezone(sender_tz)
            # Time when sender sent the reply, in sender's timezone
            current_email_time = current_email['parsed_time'].astimezone(sender_tz)

            work_start_hour = sender_info['start_hour']
            work_end_hour = sender_info['end_hour']

            # Adjusted start time: when sender could start responding
            adjusted_start_time = get_next_working_time(previous_email_time, work_start_hour, work_end_hour)

            # Compute working seconds between adjusted_start_time and current_email_time
            working_seconds = compute_working_seconds(adjusted_start_time, current_email_time, work_start_hour, work_end_hour)
            sender_info['response_times'].append(working_seconds)

    # Compute average response times
    result = {}
    for user_name, user_info in users.items():
        response_times = user_info['response_times']
        if response_times:
            avg_response_time = sum(response_times) / len(response_times)
        else:
            avg_response_time = 0
        avg_response_time = int(round(avg_response_time))
        result[user_name] = avg_response_time

    logging.info("Computed result: {}".format(result))
    return jsonify(result)
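
# Illustrative request/response shapes for the /mailtime endpoint. Field names
# are taken from the handler above; the concrete names, timestamps and values
# are made up for the example and are not part of any real dataset.
#
#   POST /mailtime
#   {
#     "users": [
#       {"name": "alice", "officeHours": {"timeZone": "Europe/London", "start": 9, "end": 17}},
#       {"name": "bob",   "officeHours": {"timeZone": "Asia/Singapore", "start": 9, "end": 17}}
#     ],
#     "emails": [
#       {"subject": "Quarterly report",     "sender": "alice", "timeSent": "2024-01-15T10:00:00+00:00"},
#       {"subject": "RE: Quarterly report", "sender": "bob",   "timeSent": "2024-01-16T11:00:00+08:00"}
#     ]
#   }
#
# Alice's email arrives at 18:00 Singapore time on Monday, after Bob's office
# hours, so Bob's clock starts at 09:00 on Tuesday and stops at his 11:00 reply:
#
#   Response: {"alice": 0, "bob": 7200}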