Skip to content

Commit

Permalink
major bug fixes with indexing and transformation methods. Added featu…
Browse files Browse the repository at this point in the history
…re to calculate indicators' regional totals without having to specify intermediate and final demand extensions separately
  • Loading branch information
FDonati committed Apr 2, 2020
1 parent 33b40c3 commit 4fe6813
Show file tree
Hide file tree
Showing 9 changed files with 153 additions and 258 deletions.
30 changes: 13 additions & 17 deletions pycirk/fundamental_operations.py
Original file line number Diff line number Diff line change
def A(inter_coef, D):
    """
    Total requirement multipliers.

    A = Z * D
    """
    multipliers = inter_coef @ D
    return multipliers

def L(A):
"""
def Z(inter_coef, D, diag_q):
    """
    Intermediate transactions.

    Z = inter_coef * D * diag(q)
    """
    requirements = inter_coef @ D
    return requirements @ diag_q

class IOT:
"""
def x(Z, Y):
    """
    Total product output: the row-wise sum of intermediate demand Z
    plus the row-wise sum of final demand Y.
    """
    intermediate = np.sum(Z, axis=1)
    final = np.sum(Y, axis=1)
    return intermediate + final

def B(R, inv_diag_x):
"""
def x_IAy(L, y):
    """
    Total product output from the Leontief inverse.

    x = inv(I - A) @ y
    """
    total_output = L @ y
    return total_output

def Z(A, diag_x):
"""
Expand Down Expand Up @@ -257,7 +257,7 @@ def IOT(Z, Y, W, E, R, M):
x = Operations.IOT.x_IAy(L, y)

ver_base = Operations.verifyIOT(Z, Y, E)

return {"A": A,
"Z": Z,
"L": L,
Expand All @@ -277,17 +277,13 @@ def IOT(Z, Y, W, E, R, M):

def calculate_characterized(data):
    """
    Apply the characterisation factor matrices (Cr_*_k) to the extension
    matrices of *data* and store the characterized results back on it.

    Covers both the intermediate extensions (E, M, R, W) and the
    final-demand extensions (EY, MY, RY); W has no final-demand analogue.
    Returns the same *data* object, mutated in place.
    """
    for name in ("E", "M", "R", "W"):
        factors = getattr(data, "Cr_" + name + "_k")
        setattr(data, "Cr_" + name, factors @ getattr(data, name))

    for name in ("E", "M", "R"):
        factors = getattr(data, "Cr_" + name + "_k")
        setattr(data, "Cr_" + name + "Y", factors @ getattr(data, name + "Y"))

    return data
49 changes: 27 additions & 22 deletions pycirk/labels.py
Original file line number Diff line number Diff line change
Expand Up @@ -64,35 +64,40 @@ def get_unique_labels(self, dataframe_of_labels, for_units=True):
else:
organize[keys] = self.list_of_something(labels)

count = max(len(labels) for keys, labels in organize.items())
organize["count"] = count

try:
organize["count"] = len(organize["synonym"])
except KeyError:
organize["count"] = len(organize["characterization"])

return Munch(organize)

def organize_unique_labels(self, directory):
    """
    Load the label tables from *directory* and cache the unique label
    sets as attributes on the instance.

    Sets: product_labels, industry_labels (when present), country_labels
    (when present), region_labels, W/Y/E/R/M_labels and the Cr_*_labels
    characterisation label sets.

    Parameters
    ----------
    directory : str
        Location passed through to ``self.load_labels``.
    """
    lbl = self.load_labels(directory)

    self.product_labels = self.get_unique_labels(lbl["prod"])

    # Industry labels may be absent (e.g. product-by-product tables).
    # Plain-dict access raises KeyError, attribute-style (Munch) access
    # raises AttributeError — catch both instead of AttributeError only,
    # which would let the KeyError from lbl["ind"] escape.
    try:
        self.industry_labels = self.get_unique_labels(lbl["ind"])
    except (KeyError, AttributeError):
        pass

    # Not every dataset carries country codes; skip quietly when absent
    # (narrowed from a blanket `except Exception`).
    try:
        self.country_labels = self.product_labels.country_code
    except AttributeError:
        pass

    self.region_labels = self.product_labels.region

    self.W_labels = self.get_unique_labels(lbl["primary"])
    self.Y_labels = self.get_unique_labels(lbl["fin_dem"])
    self.E_labels = self.get_unique_labels(lbl["emis"], False)
    self.R_labels = self.get_unique_labels(lbl["res"], False)
    self.M_labels = self.get_unique_labels(lbl["mat"], False)
    self.Cr_E_labels = self.get_unique_labels(lbl["car_emis"], False)
    self.Cr_R_labels = self.get_unique_labels(lbl["car_res"], False)
    self.Cr_M_labels = self.get_unique_labels(lbl["car_mat"], False)
    self.Cr_W_labels = self.get_unique_labels(lbl["car_prim"], False)
    # NOTE(review): the original ended with `return labels`, but `labels`
    # is never defined in this version (NameError). The method now only
    # populates instance attributes and returns None — confirm no caller
    # relied on a return value.

def load_labels(self, directory):

Expand Down Expand Up @@ -278,7 +283,7 @@ def identify_labels(self, M_name):

attr_name = name + name_2 + "_labels"
row_labels = eval("self." + attr_name)

no_row_labs = row_labels.count
no_reg_labs = len(reg_labels)
no_col_labs = column_labels.count
Expand Down
142 changes: 19 additions & 123 deletions pycirk/make_scenarios.py
Original file line number Diff line number Diff line change
Expand Up @@ -41,7 +41,7 @@ def make_counterfactuals(data, scen_no, scen_file, labels):
An object contaning a mofified IO system
"""
# set basic data and variables

print(f"Scenario {scen_no} started")
data = deepcopy(data)

x_ = ops.IOT.x(data.Z, data.Y)
Expand All @@ -65,13 +65,15 @@ def make_counterfactuals(data, scen_no, scen_file, labels):
inv_diag_x_int = np.diag(ops.inv(ops.IOT.x(data.Z, data.Y)))

A = ops.IOT.A(data.Z, inv_diag_x_int)

data.A = counterfactual(scen_file, scen_no, A, "A", labels)

data.Y = counterfactual(scen_file, scen_no, data.Y, "Y", labels)

L = ops.IOT.L(data.A)

x_new = ops.IOT.x_IAy(L, data.Y.sum(1))

diag_x_new = np.diag(x_new)

diag_yj_new = np.diag(data.Y.sum(axis=0))
Expand Down Expand Up @@ -107,13 +109,13 @@ def make_counterfactuals(data, scen_no, scen_file, labels):
data.E = counterfactual(scen_file, scen_no, data.E, "E", labels)
data.R = counterfactual(scen_file, scen_no, data.R, "R", labels)
data.M = counterfactual(scen_file, scen_no, data.M, "M", labels)

# Apply policy to final demand extension coefficient matrices
data.EY = counterfactual(scen_file, scen_no, data.EY, "EY", labels)
data.RY = counterfactual(scen_file, scen_no, data.RY, "RY", labels)
data.MY = counterfactual(scen_file, scen_no, data.MY, "MY", labels)

# print((1-np.sum(x_)/np.sum(x_new))*100)
#print((1-np.sum(x_)/np.sum(x_new))*100)
print(f"Scenario {scen_no} completed")

return data

Expand Down Expand Up @@ -147,8 +149,7 @@ def counterfactual(scen_file, scen_no, M, M_name, labels):
elif scen_no.startswith("scenario_"):
pass
else:
raise KeyError("only integer or explicit name (scenario_x)" +
"are allowed")
raise KeyError("only integer or explicit name (scenario_x) are allowed")


scenario = pd.read_excel(scen_file, sheet_name=scen_no, header=1, index=None)
Expand Down Expand Up @@ -339,7 +340,6 @@ def counterfactual_engine(M, inter_sets, subs=False, copy=False):

int4 = inter_sets["at2"]
int4 = basic_add(int3, int4)

M[np.ix_(i, g)] = int4

if subs is True:
Expand Down Expand Up @@ -422,7 +422,7 @@ def make_new(filtered_changes, M, M_name, labels):
try:
change_type = entry.change_type
ide = entry.identifier # used during debugging

# Collecting the specified coordinates for the intervention
# coordinates for region and category
# Row items (i) => Supplied category or extension category
Expand All @@ -433,11 +433,14 @@ def make_new(filtered_changes, M, M_name, labels):
cat_d = sing_pos(entry.cat_d, column_labels)
# Identify coordinates
orig_coor = coord(cat_o, reg_o, no_reg_labs, no_row_labs)
#print(f"row\n ide: {ide}, row: {entry.reg_o}, {entry.cat_o}, {orig_coor}")
dest_coor = coord(cat_d, reg_d, no_reg_labs, no_col_labs)
#print(f"columns\n ide: {ide}, column: {entry.reg_d}, {entry.cat_d}, {dest_coor}")

# organize main changes
kt1 = {"kt": entry.kt1, "kp": entry.kp1}
kt2 = {"kt": entry.kt2, "kp": entry.kp2}

intervention = {"change_type": change_type,
"ide": ide,
"i": orig_coor,
Expand All @@ -447,32 +450,32 @@ def make_new(filtered_changes, M, M_name, labels):
"at1": entry.at1,
"at2": entry.at2,
}

substitution = False
copy = False

# the following is only relevant for susbtitution
if "x" in [entry.Sub, entry.Copy]:

sub_reg_o = sing_pos(entry.reg_o_sc, reg_labels)
sub_cat_o = sing_pos(entry.cat_o_sc, row_labels)

# Column items => Consumption / manufacturing activity
sub_reg_d = sing_pos(entry.reg_d_sc, reg_labels)
sub_cat_d = sing_pos(entry.cat_d_sc, column_labels)

# Translate coordinates from str to numerical position
sub_orig_coor = coord(sub_cat_o, sub_reg_o, no_reg_labs, no_row_labs)
sub_dest_coor = coord(sub_cat_d, sub_reg_d, no_reg_labs, no_col_labs)

intervention["swk"] = entry.swk
intervention["i1"] = sub_orig_coor
intervention["g1"] = sub_dest_coor
intervention["sk1"] = entry.sk1
intervention["sk2"] = entry.sk2
intervention["sk3"] = entry.sk3
intervention["sk4"] = entry.sk4

if entry.Copy == "x":
copy = True
elif entry.Sub == "x":
Expand All @@ -481,112 +484,5 @@ def make_new(filtered_changes, M, M_name, labels):
raise ValueError(f"Check in this entry for potential coordinate errors in your scenario settings:\n{entry} ")

M = counterfactual_engine(M, intervention, substitution, copy)

return M

# =============================================================================
# =============================================================================
# # Here I put work that I started but I still need to finish
# =============================================================================
# =============================================================================
# =============================================================================
#
#
# def make_counterfactuals_SUT(data, scen_no, scen_file, labels):
# """
# Calculate all the counterfactual SUT matrices
#
# Parameters
# ----------
# data : obj
# An object containing all necessary matrices of the SUT system
#
# scen_no : int
# the identification number of the scenario to reference in scen_file
#
# scen_file : str
#     the directory where the scenarios.xlsx file is stored
#
# labels : obj
# an object containing all labels for the SUT matrices
#
# Outputs
# -------
#     An object containing a modified SUT system
# """
#
# met = ops.PxP_ITA_MSC
#
# w = ops.IOT.B(data.W, data.inv_diag_g) # Primary input coef
# e = ops.IOT.B(data.E, data.inv_diag_g) # emissions extension coef
# r = ops.IOT.B(data.R, data.inv_diag_g) # Resources extension coef
# m = ops.IOT.B(data.M, data.inv_diag_g) # Materials extension coef
# S = met.S(data.U, data.inv_diag_g) # industry coefficients for intermediate use table
#
# # Start first from a supply approach
# # Supply matrix counterfactual
# data.V = counterfactual(scen_file, scen_no, data.V, "V", labels)
# # new total industry output
# g1 = np.sum(data.V, axis=0)
# # industry use coefficients counterfactual
# S_ = counterfactual(scen_file, scen_no, S, "S", labels)
#
# data.U = counterfactual(scen_file, scen_no, S_ @ np.diag(g1), "U", labels) # industry use transactions counterfactual
#
# W_ = np.array(ops.IOT.R(w, np.diag(g1)))
#
# g2 = np.array(W_[:9].sum(0)) + data.U.sum(0) # recalculate total industry output
#
# g_dif = np.multiply(g2, ops.inv(g1))*100 # calculate the difference between original and new total industry input
#
# # print([round((1-l)*100,4) for l in g_dif if 1-l>.5e-3 and l!=0])
# q2 = np.sum(data.U, axis=1) + np.sum(data.Y, axis=1)
#
# # updating the supply table to match the new total industry input
# D = met.D(data.V, np.diag(ops.inv(data.V.sum(1))))
# data.V = D @ np.diag(q2)
#
# q1 = np.sum(data.V, axis=0) # total product output
#
# q_dif = np.multiply(q2, ops.inv(q1))
#
# g1 = np.sum(data.V, axis=1)
#
# data.E = met.R(e, np.diag(x))
#
# data.R = met.R(r, np.diag(x))
#
# data.M = met.R(m, np.diag(x))
#
#
# return(IOT)
# def balancing_operation(V, U, Y, W):
# """
# Re-balancing of supply-use tables after data changes
#
# Parameters
# ----------
# V (supply) : numpy.array
#
# U (use) : numpy.array
#
# Y (final_demand) : numpy.array
#
# W (primary_inputs) : numpy.array
#
# Output
# ------
# output : dict
#
# It outputs a dictionary containing a re-balanced supply-use tables system
# where:
# V = supply table
#
# U = use table
#
# Y = final demand
#
# W = primary inputs
#
# """
# =============================================================================
Loading

0 comments on commit 4fe6813

Please sign in to comment.