path: root/package/kernel/linux/modules/leds.mk
#
# Copyright (C) 2006-2011 OpenWrt.org
#
# This is free software, licensed under the GNU General Public License v2.
# See /LICENSE for more information.
#

LEDS_MENU:=LED modules

define KernelPackage/leds-gpio
  SUBMENU:=$(LEDS_MENU)
  TITLE:=GPIO LED support
  DEPENDS:= @GPIO_SUPPORT
  KCONFIG:=CONFIG_LEDS_GPIO
  FILES:=$(LINUX_DIR)/drivers/leds/leds-gpio.ko
  AUTOLOAD:=$(call AutoLoad,60,leds-gpio,1)
endef

define KernelPackage/leds-gpio/description
 Kernel module for LEDs on GPIO lines
endef

$(eval $(call KernelPackage,leds-gpio))

LED_TRIGGER_DIR=$(LINUX_DIR)/drivers/leds/trigger

define KernelPackage/ledtrig-activity
  SUBMENU:=$(LEDS_MENU)
  TITLE:=LED Activity Trigger
  KCONFIG:=CONFIG_LEDS_TRIGGER_ACTIVITY
  FILES:=$(LED_TRIGGER_DIR)/ledtrig-activity.ko
  AUTOLOAD:=$(call AutoLoad,50,ledtrig-activity)
endef

define KernelPackage/ledtrig-activity/description
 Kernel module that allows LEDs to blink based on system load
endef

$(eval $(call KernelPackage,ledtrig-activity))

define KernelPackage/ledtrig-audio
  SUBMENU:=$(LEDS_MENU)
  TITLE:=LED Audio Mute Trigger
  KCONFIG:=CONFIG_LEDS_TRIGGER_AUDIO
  FILES:=$(LED_TRIGGER_DIR)/ledtrig-audio.ko
  AUTOLOAD:=$(call AutoLoad,50,ledtrig-audio)
endef

define KernelPackage/ledtrig-audio/description
 Kernel module that allows LEDs to be controlled by audio drivers
 to follow audio mute and mic-mute changes.
endef

$(eval $(call KernelPackage,ledtrig-audio))

define KernelPackage/ledtrig-gpio
  SUBMENU:=$(LEDS_MENU)
  TITLE:=LED GPIO Trigger
  KCONFIG:=CONFIG_LEDS_TRIGGER_GPIO
  FILES:=$(LED_TRIGGER_DIR)/ledtrig-gpio.ko
  AUTOLOAD:=$(call AutoLoad,50,ledtrig-gpio)
endef

define KernelPackage/ledtrig-gpio/description
 Kernel module that allows LEDs to be controlled by GPIO events
endef

$(eval $(call KernelPackage,ledtrig-gpio))


define KernelPackage/ledtrig-transient
  SUBMENU:=$(LEDS_MENU)
  TITLE:=LED Transient Trigger
  KCONFIG:=CONFIG_LEDS_TRIGGER_TRANSIENT
  FILES:=$(LED_TRIGGER_DIR)/ledtrig-transient.ko
  AUTOLOAD:=$(call AutoLoad,50,ledtrig-transient,1)
endef

define KernelPackage/ledtrig-transient/description
 Kernel module that allows one-time activation of a transient state on LEDs.
endef

$(eval $(call KernelPackage,ledtrig-transient))


define KernelPackage/ledtrig-oneshot
  SUBMENU:=$(LEDS_MENU)
  TITLE:=LED One-Shot Trigger
  KCONFIG:=CONFIG_LEDS_TRIGGER_ONESHOT
  FILES:=$(LED_TRIGGER_DIR)/ledtrig-oneshot.ko
  AUTOLOAD:=$(call AutoLoad,50,ledtrig-oneshot)
endef

define KernelPackage/ledtrig-oneshot/description
 Kernel module that allows LEDs to be triggered by sporadic events in
 one-shot pulses
endef

$(eval $(call KernelPackage,ledtrig-oneshot))


define KernelPackage/ledtrig-pattern
  SUBMENU:=$(LEDS_MENU)
  TITLE:=LED Pattern Trigger
  KCONFIG:=CONFIG_LEDS_TRIGGER_PATTERN
  FILES:=$(LED_TRIGGER_DIR)/ledtrig-pattern.ko
  AUTOLOAD:=$(call AutoLoad,50,ledtrig-pattern)
endef

define KernelPackage/ledtrig-pattern/description
 This allows LEDs to be controlled by a software or hardware pattern,
 given as a series of (brightness, duration in ms) tuples.
endef

$(eval $(call KernelPackage,ledtrig-pattern))


define KernelPackage/leds-apu
  SUBMENU:=$(LEDS_MENU)
  TITLE:=PC Engines APU1 LED support
  DEPENDS:= @GPIO_SUPPORT @TARGET_x86
  KCONFIG:=CONFIG_LEDS_APU
  FILES:=$(LINUX_DIR)/drivers/leds/leds-apu.ko
  AUTOLOAD:=$(call AutoLoad,60,leds-apu,1)
endef

define KernelPackage/leds-apu/description
  Driver for the PC Engines APU1 LEDs.
endef

$(eval $(call KernelPackage,leds-apu))


define KernelPackage/leds-pca955x
  SUBMENU:=$(LEDS_MENU)
  TITLE:=LED driver for PCA955x I2C chips
  DEPENDS:=@GPIO_SUPPORT +kmod-i2c-core
  KCONFIG:=CONFIG_LEDS_PCA955X \
    CONFIG_LEDS_PCA955X_GPIO=y
  FILES:=$(LINUX_DIR)/drivers/leds/leds-pca955x.ko
  AUTOLOAD:=$(call AutoLoad,60,leds-pca955x,1)
endef

define KernelPackage/leds-pca955x/description
 This option enables support for LEDs connected to PCA955x
 LED driver chips accessed via the I2C bus.  Supported
 devices include PCA9550, PCA9551, PCA9552, and PCA9553.
endef

$(eval $(call KernelPackage,leds-pca955x))


define KernelPackage/leds-pca963x
  SUBMENU:=$(LEDS_MENU)
  TITLE:=PCA963x LED support
  DEPENDS:=+kmod-i2c-core
  KCONFIG:=CONFIG_LEDS_PCA963X
  FILES:=$(LINUX_DIR)/drivers/leds/leds-pca963x.ko
  AUTOLOAD:=$(call AutoLoad,60,leds-pca963x,1)
endef

define KernelPackage/leds-pca963x/description
 Driver for the NXP PCA963x I2C LED controllers.
endef

$(eval $(call KernelPackage,leds-pca963x))


define KernelPackage/leds-pwm
  SUBMENU:=$(LEDS_MENU)
  TITLE:=PWM driven LED Support
  KCONFIG:=CONFIG_LEDS_PWM
  DEPENDS:= @PWM_SUPPORT
  FILES:=$(LINUX_DIR)/drivers/leds/leds-pwm.ko
  AUTOLOAD:=$(call AutoLoad,60,leds-pwm,1)
endef

define KernelPackage/leds-pwm/description
 This option enables support for PWM-driven LEDs.
endef

$(eval $(call KernelPackage,leds-pwm))


define KernelPackage/leds-tlc591xx
  SUBMENU:=$(LEDS_MENU)
  TITLE:=LED driver for TLC59108 and TLC59116 controllers
  DEPENDS:=+kmod-i2c-core +kmod-regmap-i2c
  KCONFIG:=CONFIG_LEDS_TLC591XX
  FILES:=$(LINUX_DIR)/drivers/leds/leds-tlc591xx.ko
  AUTOLOAD:=$(call AutoLoad,60,leds-tlc591xx,1)
endef

define KernelPackage/leds-tlc591xx/description
 This option enables support for Texas Instruments TLC59108
 and TLC59116 LED controllers.
endef

$(eval $(call KernelPackage,leds-tlc591xx))


define KernelPackage/leds-uleds
  SUBMENU:=$(LEDS_MENU)
  TITLE:=Userspace LEDs
  KCONFIG:=CONFIG_LEDS_USER
  FILES:=$(LINUX_DIR)/drivers/leds/uleds.ko
  AUTOLOAD:=$(call AutoLoad,60,uleds,1)
endef

define KernelPackage/leds-uleds/description
 This option enables support for userspace LEDs.
endef

$(eval $(call KernelPackage,leds-uleds))


define KernelPackage/input-leds
  SUBMENU:=$(LEDS_MENU)
  TITLE:=Input device LED support
  DEPENDS:=+kmod-input-core
  KCONFIG:=CONFIG_INPUT_LEDS
  FILES:=$(LINUX_DIR)/drivers/input/input-leds.ko
  AUTOLOAD:=$(call AutoLoad,50,input-leds,1)
endef

define KernelPackage/input-leds/description
 Provides support for LEDs on input devices - for example,
 keyboard num/caps/scroll lock.
endef

$(eval $(call KernelPackage,input-leds))
"""
    This module provides more sophisticated flow tracking, filtering, and interception facilities.
"""
from __future__ import absolute_import

import traceback
from abc import abstractmethod, ABCMeta
import hashlib
import sys

import six
from six.moves import http_cookies, http_cookiejar, urllib
import os
import re

from typing import List, Optional, Set

from netlib import wsgi, odict
from netlib.exceptions import HttpException
from netlib.http import Headers, http1, cookies
from . import controller, tnetstring, filt, script, version, flow_format_compat
from .onboarding import app
from .proxy.config import HostMatcher
from .protocol.http_replay import RequestReplayThread
from .exceptions import Kill, FlowReadException
from .models import ClientConnection, ServerConnection, HTTPFlow, HTTPRequest, FLOW_TYPES
from collections import defaultdict


class AppRegistry:

    def __init__(self):
        self.apps = {}

    def add(self, app, domain, port):
        """
            Add a WSGI app to the registry, to be served for requests to the
            specified domain, on the specified port.
        """
        self.apps[(domain, port)] = wsgi.WSGIAdaptor(
            app,
            domain,
            port,
            version.NAMEVERSION
        )

    def get(self, request):
        """
            Returns a WSGIAdaptor instance if request matches an app, or None.
        """
        if (request.host, request.port) in self.apps:
            return self.apps[(request.host, request.port)]
        if "host" in request.headers:
            host = request.headers["host"]
            return self.apps.get((host, request.port), None)


class ReplaceHooks:

    def __init__(self):
        self.lst = []

    def set(self, r):
        self.clear()
        for i in r:
            self.add(*i)

    def add(self, fpatt, rex, s):
        """
            Add a replacement hook.

            fpatt: A string specifying a filter pattern.
            rex: A regular expression to search for.
            s: The replacement string.

            Returns True if the hook was added, False if the pattern could not
            be parsed.
        """
        cpatt = filt.parse(fpatt)
        if not cpatt:
            return False
        try:
            re.compile(rex)
        except re.error:
            return False
        self.lst.append((fpatt, rex, s, cpatt))
        return True

    def get_specs(self):
        """
            Retrieve the hook specifications. Returns a list of (fpatt, rex, s)
            tuples.
        """
        return [i[:3] for i in self.lst]

    def count(self):
        return len(self.lst)

    def run(self, f):
        for _, rex, s, cpatt in self.lst:
            if cpatt(f):
                if f.response:
                    f.response.replace(rex, s)
                else:
                    f.request.replace(rex, s)

    def clear(self):
        self.lst = []

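# Illustrative usage sketch (not part of the original module). The filter
# expression syntax and the HTTPFlow instance "f" are assumed from the
# surrounding mitmproxy code; "~s" is taken to mean "flow has a response".
#
#   hooks = ReplaceHooks()
#   hooks.add("~s", r"Internal Server Error", "Oops")  # returns False on a bad pattern/regex
#   hooks.run(f)  # rewrites f.response if present, otherwise f.request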

class SetHeaders:

    def __init__(self):
        self.lst = []

    def set(self, r):
        self.clear()
        for i in r:
            self.add(*i)

    def add(self, fpatt, header, value):
        """
            Add a set header hook.

            fpatt: String specifying a filter pattern.
            header: Header name.
            value: Header value string

            Returns True if hook was added, False if the pattern could not be
            parsed.
        """
        cpatt = filt.parse(fpatt)
        if not cpatt:
            return False
        self.lst.append((fpatt, header, value, cpatt))
        return True

    def get_specs(self):
        """
            Retrieve the hook specifications. Returns a list of
            (fpatt, header, value) tuples.
        """
        return [i[:3] for i in self.lst]

    def count(self):
        return len(self.lst)

    def clear(self):
        self.lst = []

    def run(self, f):
        for _, header, value, cpatt in self.lst:
            if cpatt(f):
                if f.response:
                    f.response.headers.pop(header, None)
                else:
                    f.request.headers.pop(header, None)
        for _, header, value, cpatt in self.lst:
            if cpatt(f):
                if f.response:
                    f.response.headers.add(header, value)
                else:
                    f.request.headers.add(header, value)


class StreamLargeBodies(object):

    def __init__(self, max_size):
        self.max_size = max_size

    def run(self, flow, is_request):
        r = flow.request if is_request else flow.response
        expected_size = http1.expected_http_body_size(
            flow.request, flow.response if not is_request else None
        )
        if not r.content and not (0 <= expected_size <= self.max_size):
            # r.stream may already be a callable, which we want to preserve.
            r.stream = r.stream or True

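# Illustrative sketch (max_size is assumed to be in bytes): bodies whose
# expected size falls outside [0, max_size] - i.e. too large, or not
# determinable up front - get r.stream set, so the proxy streams them
# instead of buffering.
#
#   slb = StreamLargeBodies(1024 * 1024)     # stream anything over 1 MiB
#   slb.run(flow, is_request=False)          # may set flow.response.stream = True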

class ClientPlaybackState:

    def __init__(self, flows, exit):
        self.flows, self.exit = flows, exit
        self.current = None
        self.testing = False  # Disables actual replay for testing.

    def count(self):
        return len(self.flows)

    def done(self):
        if len(self.flows) == 0 and not self.current:
            return True
        return False

    def clear(self, flow):
        """
           A request has returned in some way - if this is the one we're
           servicing, go to the next flow.
        """
        if flow is self.current:
            self.current = None

    def tick(self, master):
        if self.flows and not self.current:
            self.current = self.flows.pop(0).copy()
            if not self.testing:
                master.replay_request(self.current)
            else:
                self.current.reply = controller.DummyReply()
                master.handle_request(self.current)
                if self.current.response:
                    master.handle_response(self.current)


class ServerPlaybackState:

    def __init__(
            self,
            headers,
            flows,
            exit,
            nopop,
            ignore_params,
            ignore_content,
            ignore_payload_params,
            ignore_host):
        """
            headers: Case-insensitive list of request headers that should be
            included in request-response matching.
        """
        self.headers = headers
        self.exit = exit
        self.nopop = nopop
        self.ignore_params = ignore_params
        self.ignore_content = ignore_content
        self.ignore_payload_params = ignore_payload_params
        self.ignore_host = ignore_host
        self.fmap = {}
        for i in flows:
            if i.response:
                l = self.fmap.setdefault(self._hash(i), [])
                l.append(i)

    def count(self):
        return sum(len(i) for i in self.fmap.values())

    def _hash(self, flow):
        """
            Calculates a loose hash of the flow request.
        """
        r = flow.request

        _, _, path, _, query, _ = urllib.parse.urlparse(r.url)
        queriesArray = urllib.parse.parse_qsl(query, keep_blank_values=True)

        key = [
            str(r.port),
            str(r.scheme),
            str(r.method),
            str(path),
        ]

        if not self.ignore_content:
            form_contents = r.urlencoded_form or r.multipart_form
            if self.ignore_payload_params and form_contents:
                key.extend(
                    p for p in form_contents.items(multi=True)
                    if p[0] not in self.ignore_payload_params
                )
            else:
                key.append(str(r.content))

        if not self.ignore_host:
            key.append(r.host)

        filtered = []
        ignore_params = self.ignore_params or []
        for p in queriesArray:
            if p[0] not in ignore_params:
                filtered.append(p)
        for p in filtered:
            key.append(p[0])
            key.append(p[1])

        if self.headers:
            headers = []
            for i in self.headers:
                v = r.headers.get(i)
                headers.append((i, v))
            key.append(headers)
        return hashlib.sha256(repr(key)).digest()

    def next_flow(self, request):
        """
            Returns the next flow object, or None if no matching flow was
            found.
        """
        l = self.fmap.get(self._hash(request))
        if not l:
            return None

        if self.nopop:
            return l[0]
        else:
            return l.pop(0)

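# Illustrative sketch (assumes "recorded" is a list of HTTPFlow objects that
# already carry responses): server playback buckets recorded flows by a loose
# hash of their requests and serves them back for matching incoming flows.
#
#   sps = ServerPlaybackState(
#       headers=None, flows=recorded, exit=True, nopop=False,
#       ignore_params=[], ignore_content=False,
#       ignore_payload_params=[], ignore_host=False)
#   match = sps.next_flow(live_flow)   # a recorded flow, or None if nothing matches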

class StickyCookieState:

    def __init__(self, flt):
        """
            flt: Compiled filter.
        """
        self.jar = defaultdict(dict)
        self.flt = flt

    def ckey(self, attrs, f):
        """
            Returns a (domain, port, path) tuple.
        """
        domain = f.request.host
        path = "/"
        if "domain" in attrs:
            domain = attrs["domain"]
        if "path" in attrs:
            path = attrs["path"]
        return (domain, f.request.port, path)

    def domain_match(self, a, b):
        if http_cookiejar.domain_match(a, b):
            return True
        elif http_cookiejar.domain_match(a, b.strip(".")):
            return True
        return False

    def handle_response(self, f):
        for name, (value, attrs) in f.response.cookies.items(multi=True):
            # FIXME: We now know that Cookie.py screws up some cookies with
            # valid RFC 822/1123 datetime specifications for expiry. Sigh.
            a = self.ckey(attrs, f)
            if self.domain_match(f.request.host, a[0]):
                b = attrs.with_insert(0, name, value)
                self.jar[a][name] = b

    def handle_request(self, f):
        l = []
        if f.match(self.flt):
            for domain, port, path in self.jar.keys():
                match = [
                    self.domain_match(f.request.host, domain),
                    f.request.port == port,
                    f.request.path.startswith(path)
                ]
                if all(match):
                    c = self.jar[(domain, port, path)]
                    l.extend([cookies.format_cookie_header(c[name].items(multi=True)) for name in c.keys()])
        if l:
            f.request.stickycookie = True
            f.request.headers["cookie"] = "; ".join(l)


class StickyAuthState:

    def __init__(self, flt):
        """
            flt: Compiled filter.
        """
        self.flt = flt
        self.hosts = {}

    def handle_request(self, f):
        host = f.request.host
        if "authorization" in f.request.headers:
            self.hosts[host] = f.request.headers["authorization"]
        elif f.match(self.flt):
            if host in self.hosts:
                f.request.headers["authorization"] = self.hosts[host]


@six.add_metaclass(ABCMeta)
class FlowList(object):

    def __init__(self):
        self._list = []  # type: List[Flow]

    def __iter__(self):
        return iter(self._list)

    def __contains__(self, item):
        return item in self._list

    def __getitem__(self, item):
        return self._list[item]

    def __bool__(self):
        return bool(self._list)

    if six.PY2:
        __nonzero__ = __bool__

    def __len__(self):
        return len(self._list)

    def index(self, f):
        return self._list.index(f)

    @abstractmethod
    def _add(self, f):
        return

    @abstractmethod
    def _update(self, f):
        return

    @abstractmethod
    def _remove(self, f):
        return


class FlowView(FlowList):

    def __init__(self, store, filt=None):
        super(FlowView, self).__init__()
        if not filt:
            filt = lambda flow: True
        self._build(store, filt)

        self.store = store
        self.store.views.append(self)

    def _close(self):
        self.store.views.remove(self)

    def _build(self, flows, filt=None):
        if filt:
            self.filt = filt
        self._list = list(filter(self.filt, flows))

    def _add(self, f):
        if self.filt(f):
            self._list.append(f)

    def _update(self, f):
        if f not in self._list:
            self._add(f)
        elif not self.filt(f):
            self._remove(f)

    def _remove(self, f):
        if f in self._list:
            self._list.remove(f)

    def _recalculate(self, flows):
        self._build(flows)


class FlowStore(FlowList):

    """
    Responsible for handling flows in the state:
    Keeps a list of all flows and provides views on them.
    """

    def __init__(self):
        super(FlowStore, self).__init__()
        self._set = set()  # Used for O(1) lookups
        self.views = []
        self._recalculate_views()

    def get(self, flow_id):
        for f in self._list:
            if f.id == flow_id:
                return f

    def __contains__(self, f):
        return f in self._set

    def _add(self, f):
        """
        Adds a flow to the state.
        The flow to add must not be present in the state.
        """
        self._list.append(f)
        self._set.add(f)
        for view in self.views:
            view._add(f)

    def _update(self, f):
        """
        Notifies the state that a flow has been updated.
        The flow must be present in the state.
        """
        if f in self:
            for view in self.views:
                view._update(f)

    def _remove(self, f):
        """
        Deletes a flow from the state.
        The flow must be present in the state.
        """
        self._list.remove(f)
        self._set.remove(f)
        for view in self.views:
            view._remove(f)

    # Expensive bulk operations

    def _extend(self, flows):
        """
        Adds a list of flows to the state.
        The list of flows to add must not contain flows that are already in the state.
        """
        self._list.extend(flows)
        self._set.update(flows)
        self._recalculate_views()

    def _clear(self):
        self._list = []
        self._set = set()
        self._recalculate_views()

    def _recalculate_views(self):
        """
        Expensive operation: Recalculate all the views after a bulk change.
        """
        for view in self.views:
            view._recalculate(self)

    # Utility functions.
    # There are some common cases where we need to reason about all flows
    # irrespective of filters on the view etc (i.e. on shutdown).

    def active_count(self):
        c = 0
        for i in self._list:
            if not i.response and not i.error:
                c += 1
        return c

    # TODO: Should accept_all operate on views or on all flows?
    def accept_all(self, master):
        for f in self._list:
            f.accept_intercept(master)

    def kill_all(self, master):
        for f in self._list:
            f.kill(master)


class State(object):

    def __init__(self):
        self.flows = FlowStore()
        self.view = FlowView(self.flows, None)

        # These are compiled filt expressions:
        self.intercept = None

    @property
    def limit_txt(self):
        return getattr(self.view.filt, "pattern", None)

    def flow_count(self):
        return len(self.flows)

    # TODO: All functions regarding flows that don't cause side-effects should
    # be moved into FlowStore.
    def index(self, f):
        return self.flows.index(f)

    def active_flow_count(self):
        return self.flows.active_count()

    def add_flow(self, f):
        """
            Add a request to the state.
        """
        self.flows._add(f)
        return f

    def update_flow(self, f):
        """
            Add a response to the state.
        """
        self.flows._update(f)
        return f

    def delete_flow(self, f):
        self.flows._remove(f)

    def load_flows(self, flows):
        self.flows._extend(flows)

    def set_limit(self, txt):
        if txt == self.limit_txt:
            return
        if txt:
            f = filt.parse(txt)
            if not f:
                return "Invalid filter expression."
            self.view._close()
            self.view = FlowView(self.flows, f)
        else:
            self.view._close()
            self.view = FlowView(self.flows, None)

    def set_intercept(self, txt):
        if txt:
            f = filt.parse(txt)
            if not f:
                return "Invalid filter expression."
            self.intercept = f
        else:
            self.intercept = None

    @property
    def intercept_txt(self):
        return getattr(self.intercept, "pattern", None)

    def clear(self):
        self.flows._clear()

    def accept_all(self, master):
        self.flows.accept_all(master)

    def backup(self, f):
        f.backup()
        self.update_flow(f)

    def revert(self, f):
        f.revert()
        self.update_flow(f)

    def killall(self, master):
        self.flows.kill_all(master)

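# Illustrative sketch (the filter expression syntax is assumed from mitmproxy's
# filt module): State stores every flow in a FlowStore and exposes the subset
# matching the current limit through a FlowView.
#
#   state = State()
#   state.add_flow(f)                          # f: an HTTPFlow
#   err = state.set_limit("~d example.com")    # error string if the expression is invalid
#   matching = list(state.view)                # only flows matching the limit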

class FlowMaster(controller.ServerMaster):

    @property
    def server(self):
        # At some point, we may want to have support for multiple servers.
        # For now, this suffices.
        if len(self.servers) > 0:
            return self.servers[0]

    def __init__(self, server, state):
        super(FlowMaster, self).__init__()
        if server:
            self.add_server(server)
        self.state = state
        self.server_playback = None
        self.client_playback = None
        self.kill_nonreplay = False
        self.scripts = []  # type: List[script.Script]
        self.pause_scripts = False

        self.stickycookie_state = None  # type: Optional[StickyCookieState]
        self.stickycookie_txt = None

        self.stickyauth_state = None  # type: Optional[StickyAuthState]
        self.stickyauth_txt = None

        self.anticache = False
        self.anticomp = False
        self.stream_large_bodies = None  # type: Optional[StreamLargeBodies]
        self.refresh_server_playback = False
        self.replacehooks = ReplaceHooks()
        self.setheaders = SetHeaders()
        self.replay_ignore_params = False
        self.replay_ignore_content = None
        self.replay_ignore_host = False

        self.stream = None
        self.apps = AppRegistry()

    def start_app(self, host, port):
        self.apps.add(
            app.mapp,
            host,
            port
        )

    def add_event(self, e, level="info"):
        """
            level: debug, info, error
        """

    def unload_scripts(self):
        for s in self.scripts[:]:
            self.unload_script(s)

    def unload_script(self, script_obj):
        try:
            script_obj.unload()
        except script.ScriptException as e:
            self.add_event("Script error:\n" + str(e), "error")
        script.reloader.unwatch(script_obj)
        self.scripts.remove(script_obj)

    def load_script(self, command, use_reloader=False):
        """
            Loads a script.

            Raises:
                ScriptException
        """
        s = script.Script(command, script.ScriptContext(self))
        s.load()
        if use_reloader:
            script.reloader.watch(s, lambda: self.event_queue.put(("script_change", s)))
        self.scripts.append(s)

    def _run_single_script_hook(self, script_obj, name, *args, **kwargs):
        if script_obj and not self.pause_scripts:
            try:
                script_obj.run(name, *args, **kwargs)
            except script.ScriptException as e:
                self.add_event("Script error:\n{}".format(e), "error")

    def run_script_hook(self, name, *args, **kwargs):
        for script_obj in self.scripts:
            self._run_single_script_hook(script_obj, name, *args, **kwargs)

    def get_ignore_filter(self):
        return self.server.config.check_ignore.patterns

    def set_ignore_filter(self, host_patterns):
        self.server.config.check_ignore = HostMatcher(host_patterns)

    def get_tcp_filter(self):
        return self.server.config.check_tcp.patterns

    def set_tcp_filter(self, host_patterns):
        self.server.config.check_tcp = HostMatcher(host_patterns)

    def set_stickycookie(self, txt):
        if txt:
            flt = filt.parse(txt)
            if not flt:
                return "Invalid filter expression."
            self.stickycookie_state = StickyCookieState(flt)
            self.stickycookie_txt = txt
        else:
            self.stickycookie_state = None
            self.stickycookie_txt = None

    def set_stream_large_bodies(self, max_size):
        if max_size is not None:
            self.stream_large_bodies = StreamLargeBodies(max_size)
        else:
            self.stream_large_bodies = False

    def set_stickyauth(self, txt):
        if txt:
            flt = filt.parse(txt)
            if not flt:
                return "Invalid filter expression."
            self.stickyauth_state = StickyAuthState(flt)
            self.stickyauth_txt = txt
        else:
            self.stickyauth_state = None
            self.stickyauth_txt = None

    def start_client_playback(self, flows, exit):
        """
            flows: List of flows.
        """
        self.client_playback = ClientPlaybackState(flows, exit)

    def stop_client_playback(self):
        self.client_playback = None

    def start_server_playback(
            self,
            flows,
            kill,
            headers,
            exit,
            nopop,
            ignore_params,
            ignore_content,
            ignore_payload_params,
            ignore_host):
        """
            flows: List of flows.
            kill: Boolean, should we kill requests not part of the replay?
            ignore_params: list of parameters to ignore in server replay
            ignore_content: true if request content should be ignored in server replay
            ignore_payload_params: list of content params to ignore in server replay
            ignore_host: true if request host should be ignored in server replay
        """
        self.server_playback = ServerPlaybackState(
            headers,
            flows,
            exit,
            nopop,
            ignore_params,
            ignore_content,
            ignore_payload_params,
            ignore_host)
        self.kill_nonreplay = kill

    def stop_server_playback(self):
        self.server_playback = None

    def do_server_playback(self, flow):
        """
            This method should be called by child classes in the handle_request
            handler. Returns True if playback has taken place, None if not.
        """
        if self.server_playback:
            rflow = self.server_playback.next_flow(flow)
            if not rflow:
                return None
            response = rflow.response.copy()
            response.is_replay = True
            if self.refresh_server_playback:
                response.refresh()
            flow.response = response
            return True
        return None

    def tick(self, timeout):
        if self.client_playback:
            stop = (
                self.client_playback.done() and
                self.state.active_flow_count() == 0
            )
            exit = self.client_playback.exit
            if stop:
                self.stop_client_playback()
                if exit:
                    self.shutdown()
            else:
                self.client_playback.tick(self)

        if self.server_playback:
            stop = (
                self.server_playback.count() == 0 and
                self.state.active_flow_count() == 0 and
                not self.kill_nonreplay
            )
            exit = self.server_playback.exit
            if stop:
                self.stop_server_playback()
                if exit:
                    self.shutdown()
        return super(FlowMaster, self).tick(timeout)

    def duplicate_flow(self, f):
        f2 = f.copy()
        self.load_flow(f2)
        return f2

    def create_request(self, method, scheme, host, port, path):
        """
            Creates a new artificial and minimalist request and adds it to the flow list.
        """
        c = ClientConnection.make_dummy(("", 0))
        s = ServerConnection.make_dummy((host, port))

        f = HTTPFlow(c, s)
        headers = Headers()

        req = HTTPRequest(
            "absolute",
            method,
            scheme,
            host,
            port,
            path,
            b"HTTP/1.1",
            headers,
            b""
        )
        f.request = req
        self.load_flow(f)
        return f

    def load_flow(self, f):
        """
        Loads a flow
        """
        if isinstance(f, HTTPFlow):
            if self.server and self.server.config.mode == "reverse":
                f.request.host = self.server.config.upstream_server.address.host
                f.request.port = self.server.config.upstream_server.address.port
                f.request.scheme = self.server.config.upstream_server.scheme

            f.reply = controller.DummyReply()
            if f.request:
                self.handle_request(f)
            if f.response:
                self.handle_responseheaders(f)
                self.handle_response(f)
            if f.error:
                self.handle_error(f)
        else:
            raise NotImplementedError()

    def load_flows(self, fr):
        """
            Load flows from a FlowReader object.
        """
        cnt = 0
        for i in fr.stream():
            cnt += 1
            self.load_flow(i)
        return cnt

    def load_flows_file(self, path):
        path = os.path.expanduser(path)
        try:
            if path == "-":
                # This is incompatible with Python 3 - maybe we can use click?
                freader = FlowReader(sys.stdin)
                return self.load_flows(freader)
            else:
                with open(path, "rb") as f:
                    freader = FlowReader(f)
                    return self.load_flows(freader)
        except IOError as v:
            raise FlowReadException(v.strerror)

    def process_new_request(self, f):
        if self.stickycookie_state:
            self.stickycookie_state.handle_request(f)
        if self.stickyauth_state:
            self.stickyauth_state.handle_request(f)

        if self.anticache:
            f.request.anticache()
        if self.anticomp:
            f.request.anticomp()

        if self.server_playback:
            pb = self.do_server_playback(f)
            if not pb and self.kill_nonreplay:
                f.kill(self)

    def process_new_response(self, f):
        if self.stickycookie_state:
            self.stickycookie_state.handle_response(f)

    def replay_request(self, f, block=False, run_scripthooks=True):
        """
            Returns None if successful, or error message if not.
        """
        if f.live and run_scripthooks:
            return "Can't replay live request."
        if f.intercepted:
            return "Can't replay while intercepting..."
        if f.request.content is None:
            return "Can't replay request with missing content..."
        if f.request:
            f.backup()
            f.request.is_replay = True
            if "Content-Length" in f.request.headers:
                f.request.headers["Content-Length"] = str(len(f.request.content))
            f.response = None
            f.error = None
            self.process_new_request(f)
            rt = RequestReplayThread(
                self.server.config,
                f,
                self.event_queue if run_scripthooks else False,
                self.should_exit
            )
            rt.start()  # pragma: no cover
            if block:
                rt.join()

    def handle_log(self, l):
        self.add_event(l.msg, l.level)
        l.reply()

    def handle_clientconnect(self, root_layer):
        self.run_script_hook("clientconnect", root_layer)
        root_layer.reply()

    def handle_clientdisconnect(self, root_layer):
        self.run_script_hook("clientdisconnect", root_layer)
        root_layer.reply()

    def handle_serverconnect(self, server_conn):
        self.run_script_hook("serverconnect", server_conn)
        server_conn.reply()

    def handle_serverdisconnect(self, server_conn):
        self.run_script_hook("serverdisconnect", server_conn)
        server_conn.reply()

    def handle_next_layer(self, top_layer):
        self.run_script_hook("next_layer", top_layer)
        top_layer.reply()

    def handle_error(self, f):
        self.state.update_flow(f)
        self.run_script_hook("error", f)
        if self.client_playback:
            self.client_playback.clear(f)
        f.reply()
        return f

    def handle_request(self, f):
        if f.live:
            app = self.apps.get(f.request)
            if app:
                err = app.serve(
                    f,
                    f.client_conn.wfile,
                    **{"mitmproxy.master": self}
                )
                if err:
                    self.add_event("Error in wsgi app. %s" % err, "error")
                f.reply(Kill)
                return
        if f not in self.state.flows:  # don't add again on replay
            self.state.add_flow(f)
        self.replacehooks.run(f)
        self.setheaders.run(f)
        self.process_new_request(f)
        self.run_script_hook("request", f)
        return f

    def handle_responseheaders(self, f):
        try:
            if self.stream_large_bodies:
                self.stream_large_bodies.run(f, False)
        except HttpException:
            f.reply(Kill)
            return

        self.run_script_hook("responseheaders", f)

        f.reply()
        return f

    def handle_response(self, f):
        self.state.update_flow(f)
        self.replacehooks.run(f)
        self.setheaders.run(f)
        self.run_script_hook("response", f)
        if self.client_playback:
            self.client_playback.clear(f)
        self.process_new_response(f)
        if self.stream:
            self.stream.add(f)
        return f

    def handle_intercept(self, f):
        self.state.update_flow(f)

    def handle_accept_intercept(self, f):
        self.state.update_flow(f)

    def handle_script_change(self, s):
        """
        Handle a script whose contents have been changed on the file system.

        Args:
            s (script.Script): the changed script

        Returns:
            True, if reloading was successful.
            False, otherwise.
        """
        ok = True
        # We deliberately do not want to fail here.
        # In the worst case, we have an "empty" script object.
        try:
            s.unload()
        except script.ScriptException as e:
            ok = False
            self.add_event('Error reloading "{}":\n{}'.format(s.filename, e), 'error')
        try:
            s.load()
        except script.ScriptException as e:
            ok = False
            self.add_event('Error reloading "{}":\n{}'.format(s.filename, e), 'error')
        else:
            self.add_event('"{}" reloaded.'.format(s.filename), 'info')
        return ok

    def handle_tcp_message(self, m):
        self.run_script_hook("tcp_message", m)
        m.reply()

    def shutdown(self):
        super(FlowMaster, self).shutdown()

        # Add all flows that are still active
        if self.stream:
            for i in self.state.flows:
                if not i.response:
                    self.stream.add(i)
            self.stop_stream()

        self.unload_scripts()

    def start_stream(self, fp, filt):
        self.stream = FilteredFlowWriter(fp, filt)

    def stop_stream(self):
        self.stream.fo.close()
        self.stream = None

    def start_stream_to_path(self, path, mode="wb", filt=None):
        path = os.path.expanduser(path)
        try:
            f = open(path, mode)
            self.start_stream(f, filt)
        except IOError as v:
            return str(v)
        self.stream_path = path


def read_flows_from_paths(paths):
    """
    Given a list of filepaths, read all flows and return a list of them.
    From a performance perspective, streaming would be advisable -
    however, if there's an error with one of the files, we want it to be raised immediately.

    Raises:
        FlowReadException, if any error occurs.
    """
    try:
        flows = []
        for path in paths:
            path = os.path.expanduser(path)
            with open(path, "rb") as f:
                flows.extend(FlowReader(f).stream())
    except IOError as e:
        raise FlowReadException(e.strerror)
    return flows


class FlowWriter:

    def __init__(self, fo):
        self.fo = fo

    def add(self, flow):
        d = flow.get_state()
        tnetstring.dump(d, self.fo)


class FlowReader:

    def __init__(self, fo):
        self.fo = fo

    def stream(self):
        """
            Yields Flow objects from the dump.
        """

        # There is a weird mingw bug that breaks .tell() when reading from stdin.
        try:
            self.fo.tell()
        except IOError:  # pragma: no cover
            can_tell = False
        else:
            can_tell = True

        off = 0
        try:
            while True:
                data = tnetstring.load(self.fo)
                try:
                    data = flow_format_compat.migrate_flow(data)
                except ValueError as e:
                    raise FlowReadException(str(e))
                if can_tell:
                    off = self.fo.tell()
                if data["type"] not in FLOW_TYPES:
                    raise FlowReadException("Unknown flow type: {}".format(data["type"]))
                yield FLOW_TYPES[data["type"]].from_state(data)
        except ValueError:
            # Error is due to EOF
            if can_tell and self.fo.tell() == off and self.fo.read() == '':
                return
            raise FlowReadException("Invalid data format.")

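# Illustrative round-trip sketch (the file name and flow objects are
# hypothetical): FlowWriter serialises flow state as tnetstrings, FlowReader
# streams it back as Flow objects.
#
#   with open("flows.dump", "wb") as fo:
#       FlowWriter(fo).add(f)                  # f: an HTTPFlow with a response
#   with open("flows.dump", "rb") as fo:
#       for flow in FlowReader(fo).stream():
#           print(flow.request.url)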

class FilteredFlowWriter:

    def __init__(self, fo, filt):
        self.fo = fo
        self.filt = filt

    def add(self, f):
        if self.filt and not f.match(self.filt):
            return
        d = f.get_state()
        tnetstring.dump(d, self.fo)