global_state = {'value': 1}

def add1(a):
    return a + global_state['value']

def add2(a):
    return a + 1

def add3(a):
    out = a + 1
    print(out)
    return out
Knowledge Check
Which of these functions are pure?
def simple_range(up_to):
    out = []
    for a in range(up_to):
        out.append(a)
    return out

def other_defaults(arg1=1, some_list=[1, 2, 3]):
    out = []
    for val in some_list:
        out.append(arg1 + val)
    return out
# never give a lambda a name
func1 = lambda x, y, z: x + y + z

# use a def instead
def func1(x, y, z):
    return x + y + z
Map
Apply a function to each element of a sequence
my_range = range(10)

# square each element
out = map(lambda x: x*x, my_range)
out = (x*x for x in range(10))
Map
from itertools import repeat
from random import randrange

arr_1 = map(randrange, repeat(10, 10))
from random import randrange

arr_1 = [randrange(10) for _ in range(10)]
Reduce
Apply a function to the first two elements, then apply it to that result and the third element, etc.
from functools import reduce

my_range = range(1, 6)

# multiply the elements together: ((((1*2)*3)*4)*5) = 120
out = reduce(lambda x, y: x*y, my_range)
Accumulate
Like reduce, but intermediate values are stored (e.g., cumulative max)
from itertools import accumulate
from random import randrange

rand_ints = (randrange(1000) for _ in range(10))
result = accumulate(rand_ints, max)
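With a fixed input instead of random ints (an illustrative check, not from the original slide), the running maximum looks like this:

from itertools import accumulate

list(accumulate([3, 1, 4, 1, 5], max))  # [3, 3, 4, 4, 5]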
Recursion
When a function calls itself. Has a base case and at least one recursive case.
def fibonacci(n):
    if n <= 1:
        return n
    fib1 = fibonacci(n - 1)
    fib2 = fibonacci(n - 2)
    return fib1 + fib2
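A quick check of the definition above:

fibonacci(10)  # 55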
Recursion
Recursion:
Works well for parsing recursive data structures (e.g., trees; see the sketch after this list)
Easy to overflow the stack
Can be difficult to understand, should be used sparingly
Good for showing off, coding interviews
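A minimal sketch of the tree case (the nested-list tree here is illustrative, not from the slides):

def tree_sum(node):
    # base case: a leaf is just a number
    if isinstance(node, int):
        return node
    # recursive case: a node is a list of subtrees
    return sum(tree_sum(child) for child in node)

tree = [1, [2, 3], [[4], 5]]
tree_sum(tree)  # 15
# deep enough nesting will hit Python's recursion limit (RecursionError)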
Generators
A function which suspends and resumes state
Useful for memory conservation (large or infinite lists)
Capable of two-way communication, the premise of Python async (see the sketch after this list)
Implements the Iterable and Iterator protocols
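A minimal sketch of the two-way channel (the running-average generator is illustrative, not from the deck): a value passed to send() becomes the result of the yield expression inside the generator.

def running_average():
    total, count, average = 0.0, 0, None
    while True:
        # send() delivers a value here; yield hands the current average back
        value = yield average
        total += value
        count += 1
        average = total / count

gen = running_average()
next(gen)      # prime the generator up to its first yield
gen.send(10)   # 10.0
gen.send(20)   # 15.0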
Generators
def generator():
    val = 0
    while True:
        val += 1
        yield val

iterable_thing = generator()

# no __getitem__
iterable_thing[0]  # raises TypeError

# but it is *iterable*
for val in iterable_thing:
    print(val)  # prints 1, 2, 3, ... forever
Generators
def generator():
    val = 0
    while True:
        val += 1
        yield val

# also an iterator
iterator = generator()
val1 = next(iterator)  # 1
val2 = next(iterator)  # 2
val3 = next(iterator)  # 3
# a finite generator raises StopIteration once exhausted
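With a finite generator (an illustrative one, not from the slide), exhaustion looks like this:

finite = (x for x in range(2))
next(finite)  # 0
next(finite)  # 1
next(finite)  # raises StopIteration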
Yield From
def generator_0(iterable):
    yield iterable

input_list = [1, 2, 3]
one_el = next(generator_0(input_list))        # [1, 2, 3]
total = [x for x in generator_0(input_list)]  # [[1, 2, 3]]
listout = list(generator_0(input_list))       # [[1, 2, 3]]
Yield From
def generator_1(iterable):
    for a in iterable:
        yield a

input_list = [1, 2, 3]
one_el = next(generator_1(input_list))        # 1
total = [x for x in generator_1(input_list)]  # [1, 2, 3]
listout = list(generator_1(input_list))       # [1, 2, 3]
Yield From
yield from delegates iteration to another iterable
def generator_1(iterable):
    for a in iterable:
        yield a

def generator_2(iterable):
    yield from iterable
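Both spellings behave the same for plain iteration (a quick check using the two functions above):

list(generator_1([1, 2, 3]))  # [1, 2, 3]
list(generator_2([1, 2, 3]))  # [1, 2, 3]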
my_dict = {1: 1, 2: 2}
items = my_dict.items()

generator_comp = (x for x in range(10))
Context Managers
Uses the with keyword
Useful to manage setup/teardown for different contexts
with open('some_file.txt', 'w') as fi:
    fi.write('my txt')
    ...
Context Managers
from contextlib import contextmanager

@contextmanager
def my_open(filename, mode):
    fi = open(filename, mode)
    yield fi
    fi.close()

with my_open('test_file.txt', 'w') as fi:
    fi.write('my txt')
    ...
Context Managers
Class-based context managers are more powerful: __exit__ can inspect and suppress exceptions (see the sketch after the example)
class MyOpen:
    def __init__(self, path, mode):
        self._path = path
        self._mode = mode
        self._fi = None

    def __enter__(self):
        self._fi = open(self._path, self._mode)
        return self._fi

    def __exit__(self, exc_type, exc_value, exc_tb):
        self._fi.close()

with MyOpen('test_file.txt', 'w') as fi:
    fi.write('my txt')
    ...
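As a sketch of that extra power (the Suppress class is illustrative, not from the deck): __exit__ receives any exception raised in the block and can suppress it by returning True.

class Suppress:
    def __init__(self, *exc_types):
        self._exc_types = exc_types

    def __enter__(self):
        return self

    def __exit__(self, exc_type, exc_value, exc_tb):
        # returning True tells Python the exception was handled
        return exc_type is not None and issubclass(exc_type, self._exc_types)

with Suppress(FileNotFoundError):
    open('missing_file.txt')
# execution continues here; the exception was swallowed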
Summary
Pure functions and immutable data are advantageous
Avoid shared mutable state (when possible)
Functions can be passed to functions
Decorators mark or modify a callable
Recursion is cool, but not usually the simplest approach
Generators suspend function state
Context managers are a clean way to handle setup/teardown