15 """ Set of Tool in order to modify a given UFO model.
16 (mainly by adding-suppressing interactions and allow to modify by text the
17 different part of the model. Check of consistency of the model are performed.
18 This produce a new valid UFO model in output.
19 """

import glob
import logging
import os
import re
import sys

import madgraph.iolibs.files as files
import madgraph.various.misc as misc
import models as ufomodels
import models.import_ufo as import_ufo
import models.check_param_card as check_param_card

pjoin = os.path.join
logger = logging.getLogger('madgraph.model')

class USRMODERROR(Exception): pass


class UFOModel(object):
    """ The class storing the current status of the model """

    def __init__(self, modelpath, addon='__1'):
        """load the model from a valid UFO directory (otherwise keep everything
        empty)."""

        self.modelpath = modelpath
        model = ufomodels.load_model(modelpath)

        if not hasattr(model, 'all_orders'):
            raise USRMODERROR, 'Base model doesn\'t follow the UFO convention (no couplings_order information)\n' + \
                  'MG5 is able to load such a model but NOT to use it with the add model feature.'
        if isinstance(model.all_particles[0].mass, basestring):
            raise USRMODERROR, 'Base model doesn\'t follow the UFO convention (Mass/Width of particles are string names, not objects)\n' + \
                  'MG5 is able to load such a model but NOT to use it with the add model feature.'

        self.particles = model.all_particles
        if any(hasattr(p, 'loop_particles') for p in self.particles):
            raise USRMODERROR, 'Base model doesn\'t follow the UFO convention'
        self.vertices = model.all_vertices
        self.couplings = model.all_couplings
        self.lorentz = model.all_lorentz
        self.parameters = model.all_parameters
        self.Parameter = self.parameters[0].__class__
        self.orders = model.all_orders

        self.functions = model.all_functions
        self.new_external = []

        if hasattr(model, 'all_propagators'):
            self.propagators = model.all_propagators
        else:
            self.propagators = []

        if hasattr(model, 'all_CTvertices'):
            self.CTvertices = model.all_CTvertices
        else:
            self.CTvertices = []

        # some object_library versions use the attribute name 'expression'
        # instead of 'expr'; remember the translation so that the written files
        # follow the convention of the base model
        if 'self.expr = expression' in open(pjoin(self.modelpath, 'object_library.py')).read():
            self.translate = {'expr': 'expression'}
        else:
            self.translate = {}

        self.old_new = {}
        self.addon = addon

        self.particle_dict = {}
        for particle in self.particles:
            self.particle_dict[particle.pdg_code] = particle

        self.all_path = [self.modelpath]

    def write(self, outputdir):


    def write_restrict_card(self, outputdir):
        """ propagate model restriction of the original model. """

        restrict_list = [l for l in os.listdir(self.modelpath) if l.startswith('restrict_')]
        if not self.new_external:
            for p in restrict_list:
                files.cp(pjoin(self.modelpath, p), outputdir)
        else:
            for p in restrict_list:
                param_card = check_param_card.ParamCard(pjoin(self.modelpath, p))
                for parameter in self.new_external:
                    block = parameter.lhablock
                    lhaid = parameter.lhacode
                    value = parameter.value
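                    # Model restriction treats values of exactly 0 or 1 specially
                    # (such parameters can be simplified away), so nudge the new
                    # external parameters slightly to keep them in the card.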
                    if value == 0:
                        value = 1e-99
                    elif value == 1:
                        value = 9.999999e-1
                    param_card.add_param(block.lower(), lhaid, value, 'from addon')
                param_card.write(pjoin(outputdir, p))

    def create_data_text(self, obj):
        """ create the declaration text associated to the object"""

        nb_space = 0
        if hasattr(obj, 'require_args_all'):
            args = obj.require_args_all
        elif hasattr(obj, 'require_args'):
            args = obj.require_args
        else:
            args = []
        if args:
            text = """%s = %s(""" % (obj.__repr__(), obj.__class__.__name__)
        else:
            text = """%s = %s(""" % (obj.name, obj.__class__.__name__)

        for data in args:
            if data in self.translate:
                data = self.translate[data]
            if not nb_space:
                add_space = len(text)
            else:
                add_space = 0

            try:
                expr = getattr(obj, data)
            except:
                if data in ['counterterm', 'propagator', 'loop_particles']:
                    expr = None
                    setattr(obj, data, None)
                else:
                    raise
            name = str(data)
            if name in self.translate:
                name = self.translate[name]

            text += '%s%s = %s,\n' % (' ' * nb_space, name, self.format_param(getattr(obj, data)))
            nb_space += add_space

        if hasattr(obj, 'get_all'):
            other_attr = [name for name in obj.get_all().keys()
                          if name not in args]
        else:
            other_attr = obj.__dict__.keys()

        for data in other_attr:
            name = str(data)
            if name in ['partial_widths', 'loop_particles']:
                continue
            if name in self.translate:
                name = self.translate[name]
            if not nb_space:
                add_space = len(text)
            else:
                add_space = 0
            text += '%s%s = %s,\n' % (' ' * nb_space, name, self.format_param(getattr(obj, data)))
            nb_space += add_space

        text = text[:-2] + ')\n\n'

        return text

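    # For illustration, a particle declaration produced above typically looks like
    # (the name and values are an invented example, not taken from a real model):
    #
    #     X__1 = Particle(pdg_code = 9000001,
    #                     name = 'X__1',
    #                     ...)
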
    def create_file_content(self, datalist):
        """ return the concatenated declarations for all objects in datalist """
        return '\n'.join([self.create_data_text(obj) for obj in datalist])

    def write_particles(self, outputdir):
        """ write the particles.py file of the new model """
        text = """
# This file was automatically created by The UFO_usermod

from __future__ import division
from object_library import all_particles, Particle
import parameters as Param

"""
        text += self.create_file_content(self.particles)
        ff = open(os.path.join(outputdir, 'particles.py'), 'w')
        ff.writelines(text)
        ff.close()
        return

273 """ """
274 text = """
275 # This file was automatically created by The UFO_usermod
276
277 from object_library import all_vertices, Vertex
278 import particles as P
279 import couplings as C
280 import lorentz as L
281
282 """
283 text += self.create_file_content(self.vertices)
284 ff = open(os.path.join(outputdir, 'vertices.py'), 'w')
285 ff.writelines(text)
286 ff.close()
287 return
288
290 """ """
291
292 if not self.CTvertices:
293 return
294
295 text = """
296 # This file was automatically created by The UFO_usermod
297
298 from object_library import all_vertices, all_CTvertices, Vertex, CTVertex
299 import particles as P
300 import couplings as C
301 import lorentz as L
302
303 """
304 text += self.create_file_content(self.CTvertices)
305 ff = open(os.path.join(outputdir, 'CT_vertices.py'), 'w')
306 ff.writelines(text)
307 ff.close()
308 return
309
310
312 """ """
313 text = """
314 # This file was automatically created by The UFO_usermod
315
316 from object_library import all_couplings, Coupling
317 """
318 text += self.create_file_content(self.couplings)
319 ff = open(os.path.join(outputdir, 'couplings.py'), 'w')
320 ff.writelines(text)
321 ff.close()
322 return
323
325 """ """
326 text = """
327 # This file was automatically created by The UFO_usermod
328
329 from object_library import all_lorentz, Lorentz
330 """
331
332 text += self.create_file_content(self.lorentz)
333 ff = open(os.path.join(outputdir, 'lorentz.py'), 'w')
334 ff.writelines(text)
335 ff.close()
336 return
337
339 """ """
340 text = """
341 # This file was automatically created by The UFO_usermod
342
343 from object_library import all_parameters, Parameter
344 """
345
346 text += self.create_file_content(self.parameters)
347 ff = open(os.path.join(outputdir, 'parameters.py'), 'w')
348 ff.writelines(text)
349 ff.close()
350 return
351
353 """ """
354 text = """
355 # This file was automatically created by The UFO_usermod
356
357 from object_library import all_orders, CouplingOrder
358 """
359
360 text += self.create_file_content(self.orders)
361 ff = open(os.path.join(outputdir, 'coupling_orders.py'), 'w')
362 ff.writelines(text)
363 ff.close()
364 return
365
367 """ """
368 text = """
369 # This file was automatically created by The UFO_usermod
370
371 import cmath
372 from object_library import all_functions, Function
373
374 """
375
376 text += self.create_file_content(self.functions)
377 ff = open(os.path.join(outputdir, 'function_library.py'), 'w')
378 ff.writelines(text)
379 ff.close()
380 return
381
383 """ """
384
385 text = """
386 # This file was automatically created by The UFO_usermod
387 from object_library import all_propagators, Propagator
388 """
389
390 text += self.create_file_content(self.propagators)
391 ff = open(os.path.join(outputdir, 'propagators.py'), 'w')
392 ff.writelines(text)
393 ff.close()
394 return
395
397 """Copy/merge the routines written in Fortran/C++/pyhton"""
398
399
400 re_fct = re.compile('''^\s{7,70}[\w\s]*function (\w*)\(''',re.M+re.I)
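        # e.g. a Fortran line like '          double precision function myfct(x)'
        # is matched and 'myfct' is captured ('myfct' is an invented example name)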
        present_fct = set()
        for dirpath in self.all_path:
            if os.path.exists(pjoin(dirpath, 'Fortran', 'functions.f')):
                text = open(pjoin(dirpath, 'Fortran', 'functions.f')).read()
                new_fct = re_fct.findall(text)
                nb_old = len(present_fct)
                nb_added = len(new_fct)
                new_fct = set([f.lower() for f in new_fct])
                present_fct.update(new_fct)
                if len(present_fct) < nb_old + nb_added:
                    logger.critical('''Some functions in functions.f are defined in more than one model.
    This requires AT LEAST a manual modification of the resulting file, and the combined
    model most likely needs to be considered as unphysical! Use it very carefully.''')

                if not os.path.exists(pjoin(outputdir, 'Fortran')):
                    os.mkdir(pjoin(outputdir, 'Fortran'))
                fsock = open(pjoin(outputdir, 'Fortran', 'functions.f'), 'a')
                fsock.write(text)
                fsock.close()

        # copy any other hand-written routine, warning that it is not merged
        for dirpath in self.all_path:
            for subdir in ['Fortran', 'CPP', 'Python']:
                if os.path.exists(pjoin(dirpath, subdir)):
                    for filepath in os.listdir(pjoin(dirpath, subdir)):
                        if filepath == 'functions.f':
                            continue
                        if '.' not in filepath:
                            continue
                        logger.warning('Manual HELAS routines are associated to the model. They are not modified automatically! You need to check them by hand.')
                        nb = 0
                        name, extension = filepath.rsplit('.', 1)

                        # avoid overwriting a file already copied from another model
                        while 1:
                            filename = '%s%s.%s' % (name, '.moved' * nb, extension)
                            if os.path.exists(pjoin(outputdir, subdir, filename)):
                                nb += 1
                            else:
                                break
                        if not os.path.exists(pjoin(outputdir, subdir)):
                            os.mkdir(pjoin(outputdir, subdir))
                        files.cp(pjoin(dirpath, subdir, filepath), pjoin(outputdir, subdir, filename))

    def get_particle(self, name):
        """ return the particle with the given name """
        for part in self.particles:
            if part.name == name:
                return part

        raise USRMODERROR, 'no particle %s in the model' % name

    def add_parameter(self, parameter):
        """ dispatch to add_external_parameter/add_internal_parameter
            according to the nature of the parameter """
        if parameter.nature == 'internal':
            self.add_internal_parameter(parameter)
        else:
            self.add_external_parameter(parameter)

    def add_particle(self, particle, identify=None):
        """Add a particle in a consistent way"""

        name = particle.name
        if identify:
            name = identify
        old_part = next((p for p in self.particles if p.name == name), None)
        if old_part:
            # the particle already exists in the base model
            if old_part.pdg_code == particle.pdg_code:
                particle.replace = old_part
                return self.check_mass_width_of_particle(old_part, particle)
            elif identify:
                if particle.spin != old_part.spin:
                    raise USRMODERROR, "identified particles should have the same spin"
                elif particle.color != old_part.color:
                    raise USRMODERROR, "identified particles should have the same color"
                particle.pdg_code = old_part.pdg_code
                particle.replace = old_part
                return self.check_mass_width_of_particle(old_part, particle)
            else:
                logger.warning('The particle name \'%s\' is present in both models with different pdg codes' % name)
                logger.warning('The particle coming from the plug-in model will be renamed to \'%s%s\'' % (name, self.addon))
                particle.name = '%s%s' % (name, self.addon)
                self.particles.append(particle)
                return
        elif identify:
            raise USRMODERROR, "Particle %s is not in the model" % identify

        pdg = particle.pdg_code
        if pdg in self.particle_dict:
            particle.replace = self.particle_dict[pdg]
            return self.check_mass_width_of_particle(self.particle_dict[pdg], particle)
        else:
            self.particles.append(particle)
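
        # note: the 'replace' attribute set on merged particles is used later by
        # add_interaction()/add_CTinteraction() to redirect the add-on vertices
        # to the corresponding base-model objects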
    def check_mass_width_of_particle(self, p_base, p_plugin):
        """ ensure that the mass and width assignments of the two particles are compatible """

        # mass
        if p_base.mass.name != p_plugin.mass.name:
            if p_plugin.mass.name in self.old_new:
                if self.old_new[p_plugin.mass.name] != p_base.mass.name:
                    raise USRMODERROR, 'Some inconsistency in the mass assignment in the model: equivalent of %s is %s != %s ' % (p_plugin.mass.name, self.old_new[p_plugin.mass.name], p_base.mass.name)
            elif p_base.mass.name.lower() == 'zero':
                p_base.mass = p_plugin.mass
            elif p_plugin.mass.name.lower() == 'zero':
                pass
            else:
                raise USRMODERROR, 'Some inconsistency in the mass assignment in the model\n' + \
                    ' Mass: %s and %s\n' % (p_base.mass.name, p_plugin.mass.name) + \
                    ' conflict name %s\n' % self.old_new + \
                    ' pdg_code: %s %s' % (p_base.pdg_code, p_plugin.pdg_code)

        # width
        if p_base.width.name != p_plugin.width.name:
            if p_plugin.width.name in self.old_new:
                if self.old_new[p_plugin.width.name] != p_base.width.name:
                    raise USRMODERROR, 'Some inconsistency in the width assignment in the model'
            elif p_base.width.name.lower() == 'zero':
                p_base.width = p_plugin.width
            elif p_plugin.width.name.lower() == 'zero':
                pass
            else:
                raise USRMODERROR, 'Some inconsistency in the width assignment in the model'

        return

532 """adding a param_card parameter inside the current model.
533 if the parameter block/lhcode already exists then just do nothing
534 (but if the name are different then keep the info for future translation)
535 If the name already exists in the model. raise an exception.
536 """
537
538 name = parameter.name
539
540 old_param = next((p for p in self.parameters if p.name==name), None)
541 if old_param:
542 if old_param.lhablock == parameter.lhablock and \
543 old_param.lhacode == parameter.lhacode:
544 return
545 else:
546 logger.info('The two model defines the parameter \'%s\'\n' % parameter.name +
547 ' the original model for %s :%s\n' %(old_param.lhablock, old_param.lhacode)+
548 ' the plugin for %s :%s\n' %(parameter.lhablock,parameter.lhacode)+
549 ' We will rename the one from the plugin to %s%s' % (parameter.name, self.addon))
550 if old_param.nature == 'internal':
551 logger.warning('''The parameter %s is actually an internal parameter of the base model.
552 his value is given by %s.
553 If those two parameters are expected to be identical, you need to provide the value in the param_card according to this formula.
554 ''')
555
556 self.old_new[parameter.name] = '%s%s' % (parameter.name, self.addon)
557 parameter.name = '%s%s' % (parameter.name, self.addon)
558
559
560
561
562
563 old_param = next((p for p in self.parameters if p.lhacode==parameter.lhacode \
564 and p.lhablock==parameter.lhablock), None)
565 if old_param:
566 logger.info('The two model defines the block \'%s\' with id \'%s\' with different parameter name \'%s\', \'%s\'\n'\
567 % (old_param.lhablock, old_param.lhacode, parameter.name, old_param.name) + \
568 ' We will merge those two parameters in a single one')
569 if parameter.name in self.old_new.values():
570 key = [k for k in self.old_new if self.old_new[k] == parameter.name][0]
571 self.old_new[key] = old_param.name
572 self.old_new[parameter.name] = old_param.name
573 else:
574 self.old_new[parameter.name] = old_param.name
575
576
577 else:
578
579 self.parameters.append(parameter)
580 self.new_external.append(parameter)
581
583 """ add a parameter of type internal """
584
585 name = parameter.name
586
587 old_param = next((p for p in self.parameters if p.name==name), None)
588 if old_param:
589 if old_param.value == parameter.value:
590 return
591 else:
592 if self.old_new:
593 pattern = re.compile(r'\b(%s)\b' % '|'.join(self.old_new.keys()))
594 def replace(matchobj):
595 return self.old_new[matchobj.group(0)]
596 parameter.value = pattern.sub(replace, parameter.value)
597 self.old_new[parameter.name] = '%s%s' % (parameter.name, self.addon)
598
599 parameter.name = '%s%s' % (parameter.name, self.addon)
600 self.parameters.append(parameter)
601 return
602
603
604 if self.old_new:
605 pattern = re.compile(r'\b(%s)\b' % '|'.join(self.old_new.keys()))
606 def replace(matchobj):
607 return self.old_new[matchobj.group(0)]
608 parameter.value = pattern.sub(replace, parameter.value)
609
610 self.parameters.append(parameter)
611
612
613
614
616 """add one coupling"""
617
618
619 name = coupling.name
620 same_name = next((p for p in self.couplings if p.name==name), None)
621 if same_name:
622 coupling.name = '%s%s' % (coupling.name, self.addon)
623
624 if self.old_new:
625 pattern = re.compile(r'\b(%s)\b' % '|'.join(self.old_new.keys()))
626 def replace(matchobj):
627 return self.old_new[matchobj.group(0)]
628 coupling.value = pattern.sub(replace, coupling.value)
629
630 old_coupling = next((p for p in self.couplings if p.value==coupling.value), None)
631
632 if old_coupling:
633 coupling.replace = old_coupling
634 else:
635 self.couplings.append(coupling)
636
638 """adding a new coupling order inside the model"""
639
640 name = coupling_order.name
641 same_name = next((p for p in self.orders if p.name==name), None)
642 if same_name:
643 if coupling_order.hierarchy != same_name.hierarchy:
644 logger.warning('%s has different hierarchy use the minimal value (%s, %s) => %s' \
645 % (name, same_name.hierarchy, coupling_order.hierarchy,
646 min(same_name.hierarchy, coupling_order.hierarchy)))
647 same_name.hierarchy = min(same_name.hierarchy, coupling_order.hierarchy)
648 if coupling_order.expansion_order != same_name.expansion_order:
649 logger.warning('%s has different expansion_order use the minimal value (%s, %s) => %s' \
650 % (name, coupling_order.expansion_order, same_name.expansion_order,
651 min(same_name.expansion_order, coupling_order.expansion_order)))
652 same_name.expansion_order = min(same_name.expansion_order, coupling_order.expansion_order)
653 if hasattr(same_name, 'perturbative_expansion') and same_name.perturbative_expansion:
654 logger.info('%s will be forbidden to run at NLO' % same_name.name)
655 same_name.perturbative_expansion = 0
656
657
658 else:
659 self.orders.append(coupling_order)
660
662 """add one coupling"""
663
664
665 name = lorentz.name
666 same_name = next((p for p in self.lorentz if p.name==name), None)
667 if same_name:
668 lorentz.name = '%s%s' % (lorentz.name, self.addon)
669
670 if self.old_new:
671 pattern = re.compile(r'\b(%s)\b' % '|'.join(self.old_new.keys()))
672 def replace(matchobj):
673 return self.old_new[matchobj.group(0)]
674 lorentz.structure = pattern.sub(replace, lorentz.structure)
675
676 old_lor = next((p for p in self.lorentz
677 if p.structure==lorentz.structure and p.spins == lorentz.spins),
678 None)
679
680 if old_lor:
681 lorentz.replace = old_lor
682 else:
683 self.lorentz.append(lorentz)
684
686 """Add one interaction to the model. This is UNCONDITIONAL!
687 if the same interaction is in the model this means that the interaction
688 will appear twice. This is now weaken if both interaction are exactly identical!
689 (EXACT same color/lorentz/coupling expression)
690 """
691
692
693 name = interaction.name
694 same_name = next((p for p in self.vertices if p.name==name), None)
695 if same_name:
696 interaction.name = '%s%s' % (interaction.name, self.addon)
697
698
699 particles = [p.replace if hasattr(p, 'replace') else p for p in interaction.particles]
700 interaction.particles = particles
701
702
703 lorentz = [l.replace if hasattr(l, 'replace') else l for l in interaction.lorentz]
704 interaction.lorentz = lorentz
705
706
707 couplings = [(key, c.replace) if hasattr(c, 'replace') else (key, c)
708 for key, c in interaction.couplings.items()]
709 interaction.couplings = dict(couplings)
710
711
712
713
714 get_pdg = lambda vertex: sorted([p.pdg_code for p in vertex.particles])
715 id_part = get_pdg(interaction)
716 iden_vertex = [v for v in self.vertices if get_pdg(v) == id_part]
717 iden = False
718 nb_coupling = len(interaction.couplings)
719 keys = interaction.couplings.keys()
720
721 get_lor_and_color = lambda i: (interaction.lorentz[keys[i][1]].structure,
722 interaction.color[keys[i][0]])
723 for v in iden_vertex:
724 if len(v.couplings) != nb_coupling:
725 continue
726 found = []
727 for ((i,j), coup) in v.couplings.items():
728 new_lorentz = v.lorentz[j].structure
729 new_color = v.color[i]
730 k=0
731 same = [k for k in range(nb_coupling) if k not in found and
732 get_lor_and_color(k) == (new_lorentz, new_color)]
733 if not same:
734 break
735 else:
736 for k in same:
737 if interaction.couplings[keys[k]] == coup:
738 found.append(k)
739 break
740 else:
741
742 for k in same:
743 if interaction.couplings[keys[k]].order == coup.order:
744 found.append(k)
745 warning = """Did NOT add interaction %s since same particles/lorentz/color/coupling order
746 BUT did not manage to ensure that the coupling is the same. couplings expression:
747 base model: %s
748 addon model: %s
749 """ % (id_part, coup.value, interaction.couplings[keys[k]].value)
750 logger.warning(warning)
751 found.append(k)
752 break
753 else:
754 pass
755
756 else:
757
758 return
759
760 logger.info('Adding interaction for the following particles: %s' % id_part)
761
762
763
764
765 self.vertices.append(interaction)
766
768 """Add one interaction to the model. This is UNCONDITIONAL!
769 if the same interaction is in the model this means that the interaction
770 will appear twice."""
771
772
773 name = interaction.name
774 same_name = next((p for p in self.vertices if p.name==name), None)
775 if same_name:
776 interaction.name = '%s%s' % (interaction.name, self.addon)
777
778
779 particles = [p.replace if hasattr(p, 'replace') else p for p in interaction.particles]
780 interaction.particles = particles
781
782
783 lorentz = [l.replace if hasattr(l, 'replace') else l for l in interaction.lorentz]
784 interaction.lorentz = lorentz
785
786
787 couplings = [(key, c.replace) if hasattr(c, 'replace') else (key, c)
788 for key, c in interaction.couplings.items()]
789 interaction.couplings = dict(couplings)
790
791
792
793 loop_particles=[ [p.replace if hasattr(p, 'replace') else p for p in plist]
794 for plist in interaction.loop_particles]
795 interaction.loop_particles = loop_particles
796 self.CTvertices.append(interaction)
797
798
    def add_model(self, model=None, path=None, identify_particles=None):
        """add another model in the current one"""

        self.new_external = []
        if path:
            model = ufomodels.load_model(path)

        if not model:
            raise USRMODERROR, 'Need a valid Model'
        else:
            path = model.__path__[0]

        if not hasattr(model, 'all_orders'):
            raise USRMODERROR, 'Add-on model doesn\'t follow the UFO convention (no couplings_order information)\n' + \
                  'MG5 is able to load such a model but NOT to use it with the add model feature.'
        if isinstance(model.all_particles[0].mass, basestring):
            raise USRMODERROR, 'Add-on model doesn\'t follow the UFO convention (Mass/Width of particles are string names, not objects)\n' + \
                  'MG5 is able to load such a model but NOT to use it with the add model feature.'

        for order in model.all_orders:
            if hasattr(order, 'perturbative_expansion') and order.perturbative_expansion:
                raise USRMODERROR, 'Add-on model can not be a loop model.'

        for order in model.all_orders:
            self.add_coupling_order(order)
        for parameter in model.all_parameters:
            self.add_parameter(parameter)
        for coupling in model.all_couplings:
            self.add_coupling(coupling)
        for lorentz in model.all_lorentz:
            self.add_lorentz(lorentz)
        for particle in model.all_particles:
            if identify_particles and particle.name in identify_particles:
                self.add_particle(particle, identify=identify_particles[particle.name])
            else:
                self.add_particle(particle)
        for vertex in model.all_vertices:
            self.add_interaction(vertex)

        self.all_path.append(path)

        return
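
    # Example of the identify_particles mapping (hypothetical names): passing
    # {'h1': 'h'} makes the add-on particle 'h1' be identified with the
    # base-model particle 'h' instead of being added as a renamed duplicate.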