diff --git a/README-en.md b/README-en.md
index 608ffdf..d4c2a08 100644
--- a/README-en.md
+++ b/README-en.md
@@ -1,7 +1,7 @@
 # ISAT with segment anything
 # Interactive semi-automatic annotation tool for image segmentation.
-![annotate.gif](./display/标注%20-big-original.gif)
+![annotate.gif](./display/标注.gif)
 Quick annotate for image segmentation by [segment anything](https://github.com/facebookresearch/segment-anything)
diff --git a/README.md b/README.md
index f4df164..87ec954 100644
--- a/README.md
+++ b/README.md
@@ -1,7 +1,7 @@
 # ISAT with segment anything
 # Interactive semi-automatic image segmentation annotation tool
-![标注.gif](./display/标注%20-big-original.gif)
+![标注.gif](./display/标注.gif)
 **If this project makes your work or daily life easier, please give it a Star; if you would like to contribute code, please submit Pull requests**
diff --git a/UpdateLog.md b/UpdateLog.md
index f0b114a..e0b059f 100644
--- a/UpdateLog.md
+++ b/UpdateLog.md
@@ -25,4 +25,11 @@ #
 - Added conversion of ISAT-format json to LabelMe-format json
-- Polished parts of the interface
\ No newline at end of file
+- Polished parts of the interface
+
+# New version 2.0
+
+1. Reworked the interface: the category is now selected in the left panel. When using SAM, press the shortcut Q, add prompt points with the mouse, and press E to finish the annotation; there is no longer a separate category-and-group selection step.
+2. Added a menu-bar option for choosing how a mask is converted to polygons: a. keep all external contours (one polygon each), b. keep only the external contour with the most vertices (a single polygon), c. keep all contours (inner-contour polygons default to the __background__ category).
+3. Added model selection to the menu bar, listing all .pth weight files under the segment_any folder.
+4. Added support for SAM-HQ
\ No newline at end of file
diff --git a/annotation.py b/annotation.py
index 8aef041..1992622 100644
--- a/annotation.py
+++ b/annotation.py
@@ -84,7 +84,7 @@ class Annotation:
             if not is_polygon:
                 continue
             category = shape.get('label', 'unknow')
-            group = shape.get('group_id', '')
+            group = shape.get('group_id', 0)
             if group is None: group = ''
             segmentation = shape.get('points', [])
             iscrowd = shape.get('iscrowd', 0)
diff --git a/configs.py b/configs.py
index 8cab9f6..bb3aaea 100644
--- a/configs.py
+++ b/configs.py
@@ -32,4 +32,9 @@ class CLICKMode(Enum):
 class MAPMode(Enum):
     LABEL = 0
     SEMANTIC = 1
-    INSTANCE = 2
\ No newline at end of file
+    INSTANCE = 2
+
+class CONTOURMode(Enum):
+    SAVE_MAX_ONLY = 0  # keep only the contour with the most vertices (usually the largest area)
+    SAVE_EXTERNAL = 1  # keep external contours only
+    SAVE_ALL = 2       # keep all contours
\ No newline at end of file
diff --git a/default.yaml b/default.yaml
index 130b2a5..2304cf1 100644
--- a/default.yaml
+++ b/default.yaml
@@ -1,3 +1,5 @@
 label:
 - color: '#000000'
   name: __background__
+- color: '#00ff00'
+  name: aaaa
diff --git a/display/标注.gif b/display/标注.gif
new file mode 100644
index 0000000..abd69d5
Binary files /dev/null and b/display/标注.gif differ
diff --git a/icons.qrc b/icons.qrc
index c0f228b..35e469e 100644
--- a/icons.qrc
+++ b/icons.qrc
@@ -1,6 +1,7 @@
     <file>icons/semantic.png</file>
+    <file>icons/眼睛_eyes.svg</file>
     <file>icons/VOC_32x32.png</file>
     <file>icons/labelme_32x32.png</file>
     <file>icons/coco.ico</file>
diff --git a/icons/眼睛_eyes.svg b/icons/眼睛_eyes.svg
new file mode 100644
index 0000000..048e3ad
--- /dev/null
+++ b/icons/眼睛_eyes.svg
@@ -0,0 +1 @@
+
\ No newline at end of file
diff --git a/icons_rc.py b/icons_rc.py
index 7a25cfb..f41a5b3 100644
--- a/icons_rc.py
+++ b/icons_rc.py
@@ -9,377 +9,6 @@ from PyQt5 import QtCore
 qt_resource_data = b"\
-\x00\x00\x03\xca\
-\x3c\
-\x3f\x78\x6d\x6c\x20\x76\x65\x72\x73\x69\x6f\x6e\x3d\x22\x31\x2e\
-\x30\x22\x20\x65\x6e\x63\x6f\x64\x69\x6e\x67\x3d\x22\x55\x54\x46\
-\x2d\x38\x22\x3f\x3e\x3c\x73\x76\x67\x20\x77\x69\x64\x74\x68\x3d\
-\x22\x33\x32\x22\x20\x68\x65\x69\x67\x68\x74\x3d\x22\x33\x32\x22\
-\x20\x76\x69\x65\x77\x42\x6f\x78\x3d\x22\x30\x20\x30\x20\x34\x38\
-\x20\x34\x38\x22\x20\x66\x69\x6c\x6c\x3d\x22\x6e\x6f\x6e\x65\x22\
-\x20\x78\x6d\x6c\x6e\x73\x3d\x22\x68\x74\x74\x70\x3a\x2f\x2f\x77\
-\x77\x77\x2e\x77\x33\x2e\x6f\x72\x67\x2f\x32\x30\x30\x30\x2f\x73\
-\x76\x67\x22\x3e\x3c\x70\x61\x74\x68\x20\x64\x3d\x22\x4d\x36\x20\
-\x39\x43\x36\x20\x37\x2e\x33\x34\x33\x31\x35\x20\x37\x2e\x33\x34\ -\x33\x31\x35\x20\x36\x20\x39\x20\x36\x48\x33\x34\x2e\x32\x38\x31\ -\x34\x4c\x34\x32\x20\x31\x33\x2e\x32\x30\x36\x35\x56\x33\x39\x43\ -\x34\x32\x20\x34\x30\x2e\x36\x35\x36\x39\x20\x34\x30\x2e\x36\x35\ -\x36\x39\x20\x34\x32\x20\x33\x39\x20\x34\x32\x48\x39\x43\x37\x2e\ -\x33\x34\x33\x31\x35\x20\x34\x32\x20\x36\x20\x34\x30\x2e\x36\x35\ -\x36\x39\x20\x36\x20\x33\x39\x56\x39\x5a\x22\x20\x66\x69\x6c\x6c\ -\x3d\x22\x6e\x6f\x6e\x65\x22\x20\x73\x74\x72\x6f\x6b\x65\x3d\x22\ -\x23\x30\x30\x36\x34\x66\x66\x22\x20\x73\x74\x72\x6f\x6b\x65\x2d\ -\x77\x69\x64\x74\x68\x3d\x22\x33\x22\x20\x73\x74\x72\x6f\x6b\x65\ -\x2d\x6c\x69\x6e\x65\x6a\x6f\x69\x6e\x3d\x22\x6d\x69\x74\x65\x72\ -\x22\x2f\x3e\x3c\x70\x61\x74\x68\x20\x66\x69\x6c\x6c\x2d\x72\x75\ -\x6c\x65\x3d\x22\x65\x76\x65\x6e\x6f\x64\x64\x22\x20\x63\x6c\x69\ -\x70\x2d\x72\x75\x6c\x65\x3d\x22\x65\x76\x65\x6e\x6f\x64\x64\x22\ -\x20\x64\x3d\x22\x4d\x32\x34\x2e\x30\x30\x38\x33\x20\x36\x4c\x32\ -\x34\x20\x31\x33\x2e\x33\x38\x34\x36\x43\x32\x34\x20\x31\x33\x2e\ -\x37\x32\x34\x35\x20\x32\x33\x2e\x35\x35\x32\x33\x20\x31\x34\x20\ -\x32\x33\x20\x31\x34\x48\x31\x35\x43\x31\x34\x2e\x34\x34\x37\x37\ -\x20\x31\x34\x20\x31\x34\x20\x31\x33\x2e\x37\x32\x34\x35\x20\x31\ -\x34\x20\x31\x33\x2e\x33\x38\x34\x36\x4c\x31\x34\x20\x36\x22\x20\ -\x66\x69\x6c\x6c\x3d\x22\x6e\x6f\x6e\x65\x22\x2f\x3e\x3c\x70\x61\ -\x74\x68\x20\x64\x3d\x22\x4d\x32\x34\x2e\x30\x30\x38\x33\x20\x36\ -\x4c\x32\x34\x20\x31\x33\x2e\x33\x38\x34\x36\x43\x32\x34\x20\x31\ -\x33\x2e\x37\x32\x34\x35\x20\x32\x33\x2e\x35\x35\x32\x33\x20\x31\ -\x34\x20\x32\x33\x20\x31\x34\x48\x31\x35\x43\x31\x34\x2e\x34\x34\ -\x37\x37\x20\x31\x34\x20\x31\x34\x20\x31\x33\x2e\x37\x32\x34\x35\ -\x20\x31\x34\x20\x31\x33\x2e\x33\x38\x34\x36\x4c\x31\x34\x20\x36\ -\x48\x32\x34\x2e\x30\x30\x38\x33\x5a\x22\x20\x73\x74\x72\x6f\x6b\ -\x65\x3d\x22\x23\x30\x30\x36\x34\x66\x66\x22\x20\x73\x74\x72\x6f\ -\x6b\x65\x2d\x77\x69\x64\x74\x68\x3d\x22\x33\x22\x20\x73\x74\x72\ -\x6f\x6b\x65\x2d\x6c\x69\x6e\x65\x6a\x6f\x69\x6e\x3d\x22\x6d\x69\ -\x74\x65\x72\x22\x2f\x3e\x3c\x70\x61\x74\x68\x20\x64\x3d\x22\x4d\ -\x39\x20\x36\x48\x33\x34\x2e\x32\x38\x31\x34\x22\x20\x73\x74\x72\ -\x6f\x6b\x65\x3d\x22\x23\x30\x30\x36\x34\x66\x66\x22\x20\x73\x74\ -\x72\x6f\x6b\x65\x2d\x77\x69\x64\x74\x68\x3d\x22\x33\x22\x20\x73\ -\x74\x72\x6f\x6b\x65\x2d\x6c\x69\x6e\x65\x63\x61\x70\x3d\x22\x73\ -\x71\x75\x61\x72\x65\x22\x20\x73\x74\x72\x6f\x6b\x65\x2d\x6c\x69\ -\x6e\x65\x6a\x6f\x69\x6e\x3d\x22\x6d\x69\x74\x65\x72\x22\x2f\x3e\ -\x3c\x70\x61\x74\x68\x20\x64\x3d\x22\x4d\x31\x34\x20\x32\x36\x48\ -\x33\x34\x22\x20\x73\x74\x72\x6f\x6b\x65\x3d\x22\x23\x30\x30\x36\ -\x34\x66\x66\x22\x20\x73\x74\x72\x6f\x6b\x65\x2d\x77\x69\x64\x74\ -\x68\x3d\x22\x33\x22\x20\x73\x74\x72\x6f\x6b\x65\x2d\x6c\x69\x6e\ -\x65\x63\x61\x70\x3d\x22\x73\x71\x75\x61\x72\x65\x22\x20\x73\x74\ -\x72\x6f\x6b\x65\x2d\x6c\x69\x6e\x65\x6a\x6f\x69\x6e\x3d\x22\x6d\ -\x69\x74\x65\x72\x22\x2f\x3e\x3c\x70\x61\x74\x68\x20\x64\x3d\x22\ -\x4d\x31\x34\x20\x33\x34\x48\x32\x34\x2e\x30\x30\x38\x33\x22\x20\ -\x73\x74\x72\x6f\x6b\x65\x3d\x22\x23\x30\x30\x36\x34\x66\x66\x22\ -\x20\x73\x74\x72\x6f\x6b\x65\x2d\x77\x69\x64\x74\x68\x3d\x22\x33\ -\x22\x20\x73\x74\x72\x6f\x6b\x65\x2d\x6c\x69\x6e\x65\x63\x61\x70\ -\x3d\x22\x73\x71\x75\x61\x72\x65\x22\x20\x73\x74\x72\x6f\x6b\x65\ -\x2d\x6c\x69\x6e\x65\x6a\x6f\x69\x6e\x3d\x22\x6d\x69\x74\x65\x72\ -\x22\x2f\x3e\x3c\x2f\x73\x76\x67\x3e\ -\x00\x00\x02\x48\ -\x3c\ -\x3f\x78\x6d\x6c\x20\x76\x65\x72\x73\x69\x6f\x6e\x3d\x22\x31\x2e\ 
-\x30\x22\x20\x65\x6e\x63\x6f\x64\x69\x6e\x67\x3d\x22\x55\x54\x46\ -\x2d\x38\x22\x3f\x3e\x3c\x73\x76\x67\x20\x77\x69\x64\x74\x68\x3d\ -\x22\x33\x32\x22\x20\x68\x65\x69\x67\x68\x74\x3d\x22\x33\x32\x22\ -\x20\x76\x69\x65\x77\x42\x6f\x78\x3d\x22\x30\x20\x30\x20\x34\x38\ -\x20\x34\x38\x22\x20\x66\x69\x6c\x6c\x3d\x22\x6e\x6f\x6e\x65\x22\ -\x20\x78\x6d\x6c\x6e\x73\x3d\x22\x68\x74\x74\x70\x3a\x2f\x2f\x77\ -\x77\x77\x2e\x77\x33\x2e\x6f\x72\x67\x2f\x32\x30\x30\x30\x2f\x73\ -\x76\x67\x22\x3e\x3c\x70\x61\x74\x68\x20\x64\x3d\x22\x4d\x32\x34\ -\x20\x34\x34\x43\x33\x35\x2e\x30\x34\x35\x37\x20\x34\x34\x20\x34\ -\x34\x20\x33\x35\x2e\x30\x34\x35\x37\x20\x34\x34\x20\x32\x34\x43\ -\x34\x34\x20\x31\x32\x2e\x39\x35\x34\x33\x20\x33\x35\x2e\x30\x34\ -\x35\x37\x20\x34\x20\x32\x34\x20\x34\x43\x31\x32\x2e\x39\x35\x34\ -\x33\x20\x34\x20\x34\x20\x31\x32\x2e\x39\x35\x34\x33\x20\x34\x20\ -\x32\x34\x43\x34\x20\x33\x35\x2e\x30\x34\x35\x37\x20\x31\x32\x2e\ -\x39\x35\x34\x33\x20\x34\x34\x20\x32\x34\x20\x34\x34\x5a\x22\x20\ -\x66\x69\x6c\x6c\x3d\x22\x6e\x6f\x6e\x65\x22\x20\x73\x74\x72\x6f\ -\x6b\x65\x3d\x22\x23\x30\x30\x36\x34\x66\x66\x22\x20\x73\x74\x72\ -\x6f\x6b\x65\x2d\x77\x69\x64\x74\x68\x3d\x22\x33\x22\x20\x73\x74\ -\x72\x6f\x6b\x65\x2d\x6c\x69\x6e\x65\x6a\x6f\x69\x6e\x3d\x22\x6d\ -\x69\x74\x65\x72\x22\x2f\x3e\x3c\x70\x61\x74\x68\x20\x64\x3d\x22\ -\x4d\x32\x39\x2e\x36\x35\x36\x37\x20\x31\x38\x2e\x33\x34\x33\x32\ -\x4c\x31\x38\x2e\x33\x34\x33\x20\x32\x39\x2e\x36\x35\x36\x39\x22\ -\x20\x73\x74\x72\x6f\x6b\x65\x3d\x22\x23\x30\x30\x36\x34\x66\x66\ -\x22\x20\x73\x74\x72\x6f\x6b\x65\x2d\x77\x69\x64\x74\x68\x3d\x22\ -\x33\x22\x20\x73\x74\x72\x6f\x6b\x65\x2d\x6c\x69\x6e\x65\x63\x61\ -\x70\x3d\x22\x73\x71\x75\x61\x72\x65\x22\x20\x73\x74\x72\x6f\x6b\ -\x65\x2d\x6c\x69\x6e\x65\x6a\x6f\x69\x6e\x3d\x22\x6d\x69\x74\x65\ -\x72\x22\x2f\x3e\x3c\x70\x61\x74\x68\x20\x64\x3d\x22\x4d\x31\x38\ -\x2e\x33\x34\x33\x33\x20\x31\x38\x2e\x33\x34\x33\x32\x4c\x32\x39\ -\x2e\x36\x35\x37\x20\x32\x39\x2e\x36\x35\x36\x39\x22\x20\x73\x74\ -\x72\x6f\x6b\x65\x3d\x22\x23\x30\x30\x36\x34\x66\x66\x22\x20\x73\ -\x74\x72\x6f\x6b\x65\x2d\x77\x69\x64\x74\x68\x3d\x22\x33\x22\x20\ -\x73\x74\x72\x6f\x6b\x65\x2d\x6c\x69\x6e\x65\x63\x61\x70\x3d\x22\ -\x73\x71\x75\x61\x72\x65\x22\x20\x73\x74\x72\x6f\x6b\x65\x2d\x6c\ -\x69\x6e\x65\x6a\x6f\x69\x6e\x3d\x22\x6d\x69\x74\x65\x72\x22\x2f\ -\x3e\x3c\x2f\x73\x76\x67\x3e\ -\x00\x00\x03\xee\ -\x3c\ -\x3f\x78\x6d\x6c\x20\x76\x65\x72\x73\x69\x6f\x6e\x3d\x22\x31\x2e\ -\x30\x22\x20\x65\x6e\x63\x6f\x64\x69\x6e\x67\x3d\x22\x55\x54\x46\ -\x2d\x38\x22\x3f\x3e\x3c\x73\x76\x67\x20\x77\x69\x64\x74\x68\x3d\ -\x22\x33\x32\x22\x20\x68\x65\x69\x67\x68\x74\x3d\x22\x33\x32\x22\ -\x20\x76\x69\x65\x77\x42\x6f\x78\x3d\x22\x30\x20\x30\x20\x34\x38\ -\x20\x34\x38\x22\x20\x66\x69\x6c\x6c\x3d\x22\x6e\x6f\x6e\x65\x22\ -\x20\x78\x6d\x6c\x6e\x73\x3d\x22\x68\x74\x74\x70\x3a\x2f\x2f\x77\ -\x77\x77\x2e\x77\x33\x2e\x6f\x72\x67\x2f\x32\x30\x30\x30\x2f\x73\ -\x76\x67\x22\x3e\x3c\x70\x61\x74\x68\x20\x64\x3d\x22\x4d\x38\x20\ -\x32\x38\x43\x31\x30\x2e\x32\x30\x39\x31\x20\x32\x38\x20\x31\x32\ -\x20\x32\x36\x2e\x32\x30\x39\x31\x20\x31\x32\x20\x32\x34\x43\x31\ -\x32\x20\x32\x31\x2e\x37\x39\x30\x39\x20\x31\x30\x2e\x32\x30\x39\ -\x31\x20\x32\x30\x20\x38\x20\x32\x30\x43\x35\x2e\x37\x39\x30\x38\ -\x36\x20\x32\x30\x20\x34\x20\x32\x31\x2e\x37\x39\x30\x39\x20\x34\ -\x20\x32\x34\x43\x34\x20\x32\x36\x2e\x32\x30\x39\x31\x20\x35\x2e\ -\x37\x39\x30\x38\x36\x20\x32\x38\x20\x38\x20\x32\x38\x5a\x22\x20\ -\x66\x69\x6c\x6c\x3d\x22\x6e\x6f\x6e\x65\x22\x20\x73\x74\x72\x6f\ 
-\x6b\x65\x3d\x22\x23\x30\x30\x36\x34\x66\x66\x22\x20\x73\x74\x72\ -\x6f\x6b\x65\x2d\x77\x69\x64\x74\x68\x3d\x22\x33\x22\x20\x73\x74\ -\x72\x6f\x6b\x65\x2d\x6c\x69\x6e\x65\x6a\x6f\x69\x6e\x3d\x22\x6d\ -\x69\x74\x65\x72\x22\x2f\x3e\x3c\x70\x61\x74\x68\x20\x64\x3d\x22\ -\x4d\x38\x20\x31\x32\x43\x39\x2e\x31\x30\x34\x35\x37\x20\x31\x32\ -\x20\x31\x30\x20\x31\x31\x2e\x31\x30\x34\x36\x20\x31\x30\x20\x31\ -\x30\x43\x31\x30\x20\x38\x2e\x38\x39\x35\x34\x33\x20\x39\x2e\x31\ -\x30\x34\x35\x37\x20\x38\x20\x38\x20\x38\x43\x36\x2e\x38\x39\x35\ -\x34\x33\x20\x38\x20\x36\x20\x38\x2e\x38\x39\x35\x34\x33\x20\x36\ -\x20\x31\x30\x43\x36\x20\x31\x31\x2e\x31\x30\x34\x36\x20\x36\x2e\ -\x38\x39\x35\x34\x33\x20\x31\x32\x20\x38\x20\x31\x32\x5a\x22\x20\ -\x73\x74\x72\x6f\x6b\x65\x3d\x22\x23\x30\x30\x36\x34\x66\x66\x22\ -\x20\x73\x74\x72\x6f\x6b\x65\x2d\x77\x69\x64\x74\x68\x3d\x22\x33\ -\x22\x20\x73\x74\x72\x6f\x6b\x65\x2d\x6c\x69\x6e\x65\x6a\x6f\x69\ -\x6e\x3d\x22\x6d\x69\x74\x65\x72\x22\x2f\x3e\x3c\x70\x61\x74\x68\ -\x20\x64\x3d\x22\x4d\x38\x20\x34\x30\x43\x39\x2e\x31\x30\x34\x35\ -\x37\x20\x34\x30\x20\x31\x30\x20\x33\x39\x2e\x31\x30\x34\x36\x20\ -\x31\x30\x20\x33\x38\x43\x31\x30\x20\x33\x36\x2e\x38\x39\x35\x34\ -\x20\x39\x2e\x31\x30\x34\x35\x37\x20\x33\x36\x20\x38\x20\x33\x36\ -\x43\x36\x2e\x38\x39\x35\x34\x33\x20\x33\x36\x20\x36\x20\x33\x36\ -\x2e\x38\x39\x35\x34\x20\x36\x20\x33\x38\x43\x36\x20\x33\x39\x2e\ -\x31\x30\x34\x36\x20\x36\x2e\x38\x39\x35\x34\x33\x20\x34\x30\x20\ -\x38\x20\x34\x30\x5a\x22\x20\x73\x74\x72\x6f\x6b\x65\x3d\x22\x23\ -\x30\x30\x36\x34\x66\x66\x22\x20\x73\x74\x72\x6f\x6b\x65\x2d\x77\ -\x69\x64\x74\x68\x3d\x22\x33\x22\x20\x73\x74\x72\x6f\x6b\x65\x2d\ -\x6c\x69\x6e\x65\x6a\x6f\x69\x6e\x3d\x22\x6d\x69\x74\x65\x72\x22\ -\x2f\x3e\x3c\x70\x61\x74\x68\x20\x64\x3d\x22\x4d\x32\x30\x20\x32\ -\x34\x48\x34\x34\x22\x20\x73\x74\x72\x6f\x6b\x65\x3d\x22\x23\x30\ -\x30\x36\x34\x66\x66\x22\x20\x73\x74\x72\x6f\x6b\x65\x2d\x77\x69\ -\x64\x74\x68\x3d\x22\x33\x22\x20\x73\x74\x72\x6f\x6b\x65\x2d\x6c\ -\x69\x6e\x65\x63\x61\x70\x3d\x22\x73\x71\x75\x61\x72\x65\x22\x20\ -\x73\x74\x72\x6f\x6b\x65\x2d\x6c\x69\x6e\x65\x6a\x6f\x69\x6e\x3d\ -\x22\x6d\x69\x74\x65\x72\x22\x2f\x3e\x3c\x70\x61\x74\x68\x20\x64\ -\x3d\x22\x4d\x32\x30\x20\x33\x38\x48\x34\x34\x22\x20\x73\x74\x72\ -\x6f\x6b\x65\x3d\x22\x23\x30\x30\x36\x34\x66\x66\x22\x20\x73\x74\ -\x72\x6f\x6b\x65\x2d\x77\x69\x64\x74\x68\x3d\x22\x33\x22\x20\x73\ -\x74\x72\x6f\x6b\x65\x2d\x6c\x69\x6e\x65\x63\x61\x70\x3d\x22\x73\ -\x71\x75\x61\x72\x65\x22\x20\x73\x74\x72\x6f\x6b\x65\x2d\x6c\x69\ -\x6e\x65\x6a\x6f\x69\x6e\x3d\x22\x6d\x69\x74\x65\x72\x22\x2f\x3e\ -\x3c\x70\x61\x74\x68\x20\x64\x3d\x22\x4d\x32\x30\x20\x31\x30\x48\ -\x34\x34\x22\x20\x73\x74\x72\x6f\x6b\x65\x3d\x22\x23\x30\x30\x36\ -\x34\x66\x66\x22\x20\x73\x74\x72\x6f\x6b\x65\x2d\x77\x69\x64\x74\ -\x68\x3d\x22\x33\x22\x20\x73\x74\x72\x6f\x6b\x65\x2d\x6c\x69\x6e\ -\x65\x63\x61\x70\x3d\x22\x73\x71\x75\x61\x72\x65\x22\x20\x73\x74\ -\x72\x6f\x6b\x65\x2d\x6c\x69\x6e\x65\x6a\x6f\x69\x6e\x3d\x22\x6d\ -\x69\x74\x65\x72\x22\x2f\x3e\x3c\x2f\x73\x76\x67\x3e\ -\x00\x00\x01\xc6\ -\x3c\ -\x3f\x78\x6d\x6c\x20\x76\x65\x72\x73\x69\x6f\x6e\x3d\x22\x31\x2e\ -\x30\x22\x20\x65\x6e\x63\x6f\x64\x69\x6e\x67\x3d\x22\x55\x54\x46\ -\x2d\x38\x22\x3f\x3e\x3c\x73\x76\x67\x20\x77\x69\x64\x74\x68\x3d\ -\x22\x33\x32\x22\x20\x68\x65\x69\x67\x68\x74\x3d\x22\x33\x32\x22\ -\x20\x76\x69\x65\x77\x42\x6f\x78\x3d\x22\x30\x20\x30\x20\x34\x38\ -\x20\x34\x38\x22\x20\x66\x69\x6c\x6c\x3d\x22\x6e\x6f\x6e\x65\x22\ 
-\x20\x78\x6d\x6c\x6e\x73\x3d\x22\x68\x74\x74\x70\x3a\x2f\x2f\x77\ -\x77\x77\x2e\x77\x33\x2e\x6f\x72\x67\x2f\x32\x30\x30\x30\x2f\x73\ -\x76\x67\x22\x3e\x3c\x70\x61\x74\x68\x20\x64\x3d\x22\x4d\x34\x20\ -\x39\x56\x34\x31\x4c\x39\x20\x32\x31\x48\x33\x39\x2e\x35\x56\x31\ -\x35\x43\x33\x39\x2e\x35\x20\x31\x33\x2e\x38\x39\x35\x34\x20\x33\ -\x38\x2e\x36\x30\x34\x36\x20\x31\x33\x20\x33\x37\x2e\x35\x20\x31\ -\x33\x48\x32\x34\x4c\x31\x39\x20\x37\x48\x36\x43\x34\x2e\x38\x39\ -\x35\x34\x33\x20\x37\x20\x34\x20\x37\x2e\x38\x39\x35\x34\x33\x20\ -\x34\x20\x39\x5a\x22\x20\x73\x74\x72\x6f\x6b\x65\x3d\x22\x23\x30\ -\x30\x36\x34\x66\x66\x22\x20\x73\x74\x72\x6f\x6b\x65\x2d\x77\x69\ -\x64\x74\x68\x3d\x22\x33\x22\x20\x73\x74\x72\x6f\x6b\x65\x2d\x6c\ -\x69\x6e\x65\x63\x61\x70\x3d\x22\x73\x71\x75\x61\x72\x65\x22\x20\ -\x73\x74\x72\x6f\x6b\x65\x2d\x6c\x69\x6e\x65\x6a\x6f\x69\x6e\x3d\ -\x22\x6d\x69\x74\x65\x72\x22\x2f\x3e\x3c\x70\x61\x74\x68\x20\x64\ -\x3d\x22\x4d\x34\x30\x20\x34\x31\x4c\x34\x34\x20\x32\x31\x48\x38\ -\x2e\x38\x31\x32\x35\x4c\x34\x20\x34\x31\x48\x34\x30\x5a\x22\x20\ -\x66\x69\x6c\x6c\x3d\x22\x6e\x6f\x6e\x65\x22\x20\x73\x74\x72\x6f\ -\x6b\x65\x3d\x22\x23\x30\x30\x36\x34\x66\x66\x22\x20\x73\x74\x72\ -\x6f\x6b\x65\x2d\x77\x69\x64\x74\x68\x3d\x22\x33\x22\x20\x73\x74\ -\x72\x6f\x6b\x65\x2d\x6c\x69\x6e\x65\x63\x61\x70\x3d\x22\x73\x71\ -\x75\x61\x72\x65\x22\x20\x73\x74\x72\x6f\x6b\x65\x2d\x6c\x69\x6e\ -\x65\x6a\x6f\x69\x6e\x3d\x22\x6d\x69\x74\x65\x72\x22\x2f\x3e\x3c\ -\x2f\x73\x76\x67\x3e\ -\x00\x00\x01\xe4\ -\x3c\ -\x3f\x78\x6d\x6c\x20\x76\x65\x72\x73\x69\x6f\x6e\x3d\x22\x31\x2e\ -\x30\x22\x20\x65\x6e\x63\x6f\x64\x69\x6e\x67\x3d\x22\x55\x54\x46\ -\x2d\x38\x22\x3f\x3e\x3c\x73\x76\x67\x20\x77\x69\x64\x74\x68\x3d\ -\x22\x33\x32\x22\x20\x68\x65\x69\x67\x68\x74\x3d\x22\x33\x32\x22\ -\x20\x76\x69\x65\x77\x42\x6f\x78\x3d\x22\x30\x20\x30\x20\x34\x38\ -\x20\x34\x38\x22\x20\x66\x69\x6c\x6c\x3d\x22\x6e\x6f\x6e\x65\x22\ -\x20\x78\x6d\x6c\x6e\x73\x3d\x22\x68\x74\x74\x70\x3a\x2f\x2f\x77\ -\x77\x77\x2e\x77\x33\x2e\x6f\x72\x67\x2f\x32\x30\x30\x30\x2f\x73\ -\x76\x67\x22\x3e\x3c\x70\x61\x74\x68\x20\x66\x69\x6c\x6c\x2d\x72\ -\x75\x6c\x65\x3d\x22\x65\x76\x65\x6e\x6f\x64\x64\x22\x20\x63\x6c\ -\x69\x70\x2d\x72\x75\x6c\x65\x3d\x22\x65\x76\x65\x6e\x6f\x64\x64\ -\x22\x20\x64\x3d\x22\x4d\x34\x34\x20\x34\x30\x2e\x38\x33\x36\x31\ -\x43\x33\x39\x2e\x31\x30\x36\x39\x20\x33\x34\x2e\x38\x36\x33\x32\ -\x20\x33\x34\x2e\x37\x36\x31\x37\x20\x33\x31\x2e\x34\x37\x33\x39\ -\x20\x33\x30\x2e\x39\x36\x34\x34\x20\x33\x30\x2e\x36\x36\x38\x32\ -\x43\x32\x37\x2e\x31\x36\x37\x31\x20\x32\x39\x2e\x38\x36\x32\x35\ -\x20\x32\x33\x2e\x35\x35\x31\x37\x20\x32\x39\x2e\x37\x34\x30\x38\ -\x20\x32\x30\x2e\x31\x31\x38\x32\x20\x33\x30\x2e\x33\x30\x33\x56\ -\x34\x31\x4c\x34\x20\x32\x33\x2e\x35\x34\x35\x33\x4c\x32\x30\x2e\ -\x31\x31\x38\x32\x20\x37\x56\x31\x37\x2e\x31\x36\x37\x43\x32\x36\ -\x2e\x34\x36\x36\x37\x20\x31\x37\x2e\x32\x31\x37\x32\x20\x33\x31\ -\x2e\x38\x36\x33\x38\x20\x31\x39\x2e\x34\x39\x34\x38\x20\x33\x36\ -\x2e\x33\x30\x39\x35\x20\x32\x34\x43\x34\x30\x2e\x37\x35\x35\x33\ -\x20\x32\x38\x2e\x35\x30\x35\x32\x20\x34\x33\x2e\x33\x31\x38\x37\ -\x20\x33\x34\x2e\x31\x31\x37\x32\x20\x34\x34\x20\x34\x30\x2e\x38\ -\x33\x36\x31\x5a\x22\x20\x66\x69\x6c\x6c\x3d\x22\x6e\x6f\x6e\x65\ -\x22\x20\x73\x74\x72\x6f\x6b\x65\x3d\x22\x23\x30\x30\x36\x34\x66\ -\x66\x22\x20\x73\x74\x72\x6f\x6b\x65\x2d\x77\x69\x64\x74\x68\x3d\ -\x22\x33\x22\x20\x73\x74\x72\x6f\x6b\x65\x2d\x6c\x69\x6e\x65\x6a\ -\x6f\x69\x6e\x3d\x22\x6d\x69\x74\x65\x72\x22\x2f\x3e\x3c\x2f\x73\ -\x76\x67\x3e\ 
-\x00\x00\x02\x4e\ -\x3c\ -\x3f\x78\x6d\x6c\x20\x76\x65\x72\x73\x69\x6f\x6e\x3d\x22\x31\x2e\ -\x30\x22\x20\x65\x6e\x63\x6f\x64\x69\x6e\x67\x3d\x22\x55\x54\x46\ -\x2d\x38\x22\x3f\x3e\x3c\x73\x76\x67\x20\x77\x69\x64\x74\x68\x3d\ -\x22\x33\x32\x22\x20\x68\x65\x69\x67\x68\x74\x3d\x22\x33\x32\x22\ -\x20\x76\x69\x65\x77\x42\x6f\x78\x3d\x22\x30\x20\x30\x20\x34\x38\ -\x20\x34\x38\x22\x20\x66\x69\x6c\x6c\x3d\x22\x6e\x6f\x6e\x65\x22\ -\x20\x78\x6d\x6c\x6e\x73\x3d\x22\x68\x74\x74\x70\x3a\x2f\x2f\x77\ -\x77\x77\x2e\x77\x33\x2e\x6f\x72\x67\x2f\x32\x30\x30\x30\x2f\x73\ -\x76\x67\x22\x3e\x3c\x70\x61\x74\x68\x20\x64\x3d\x22\x4d\x31\x30\ -\x20\x34\x34\x48\x33\x38\x43\x33\x39\x2e\x31\x30\x34\x36\x20\x34\ -\x34\x20\x34\x30\x20\x34\x33\x2e\x31\x30\x34\x36\x20\x34\x30\x20\ -\x34\x32\x56\x31\x34\x48\x33\x30\x56\x34\x48\x31\x30\x43\x38\x2e\ -\x38\x39\x35\x34\x33\x20\x34\x20\x38\x20\x34\x2e\x38\x39\x35\x34\ -\x33\x20\x38\x20\x36\x56\x34\x32\x43\x38\x20\x34\x33\x2e\x31\x30\ -\x34\x36\x20\x38\x2e\x38\x39\x35\x34\x33\x20\x34\x34\x20\x31\x30\ -\x20\x34\x34\x5a\x22\x20\x66\x69\x6c\x6c\x3d\x22\x6e\x6f\x6e\x65\ -\x22\x20\x73\x74\x72\x6f\x6b\x65\x3d\x22\x23\x30\x30\x36\x34\x66\ -\x66\x22\x20\x73\x74\x72\x6f\x6b\x65\x2d\x77\x69\x64\x74\x68\x3d\ -\x22\x33\x22\x20\x73\x74\x72\x6f\x6b\x65\x2d\x6c\x69\x6e\x65\x63\ -\x61\x70\x3d\x22\x73\x71\x75\x61\x72\x65\x22\x20\x73\x74\x72\x6f\ -\x6b\x65\x2d\x6c\x69\x6e\x65\x6a\x6f\x69\x6e\x3d\x22\x6d\x69\x74\ -\x65\x72\x22\x2f\x3e\x3c\x70\x61\x74\x68\x20\x64\x3d\x22\x4d\x33\ -\x30\x20\x34\x4c\x34\x30\x20\x31\x34\x22\x20\x73\x74\x72\x6f\x6b\ -\x65\x3d\x22\x23\x30\x30\x36\x34\x66\x66\x22\x20\x73\x74\x72\x6f\ -\x6b\x65\x2d\x77\x69\x64\x74\x68\x3d\x22\x33\x22\x20\x73\x74\x72\ -\x6f\x6b\x65\x2d\x6c\x69\x6e\x65\x63\x61\x70\x3d\x22\x73\x71\x75\ -\x61\x72\x65\x22\x20\x73\x74\x72\x6f\x6b\x65\x2d\x6c\x69\x6e\x65\ -\x6a\x6f\x69\x6e\x3d\x22\x6d\x69\x74\x65\x72\x22\x2f\x3e\x3c\x70\ -\x61\x74\x68\x20\x64\x3d\x22\x4d\x32\x31\x20\x33\x35\x4c\x33\x31\ -\x20\x32\x35\x4c\x32\x37\x20\x32\x31\x4c\x31\x37\x20\x33\x31\x56\ -\x33\x35\x48\x32\x31\x5a\x22\x20\x66\x69\x6c\x6c\x3d\x22\x6e\x6f\ -\x6e\x65\x22\x20\x73\x74\x72\x6f\x6b\x65\x3d\x22\x23\x30\x30\x36\ -\x34\x66\x66\x22\x20\x73\x74\x72\x6f\x6b\x65\x2d\x77\x69\x64\x74\ -\x68\x3d\x22\x33\x22\x20\x73\x74\x72\x6f\x6b\x65\x2d\x6c\x69\x6e\ -\x65\x63\x61\x70\x3d\x22\x73\x71\x75\x61\x72\x65\x22\x20\x73\x74\ -\x72\x6f\x6b\x65\x2d\x6c\x69\x6e\x65\x6a\x6f\x69\x6e\x3d\x22\x6d\ -\x69\x74\x65\x72\x22\x2f\x3e\x3c\x2f\x73\x76\x67\x3e\ -\x00\x00\x06\x28\ -\x00\ -\x00\x15\xb6\x78\x9c\xed\x58\x4b\x6f\x1b\x55\x14\x3e\x1e\xcf\xd8\ -\x5e\x40\x95\xa2\x4a\x6c\x90\x1a\x81\x40\xdd\x20\xaa\x82\xd8\x20\ -\xa1\x6c\x40\x6c\x40\x65\xd1\x56\x6a\x41\x8a\xda\xc4\xb1\x13\x37\ -\x71\x92\x26\xe4\xe1\x24\xe3\x84\x1f\x90\x0a\xba\xa8\x5a\xa1\xb0\ -\x40\x42\x48\x88\x2c\x2a\xba\x01\xe1\x1d\x20\x2a\x6a\x21\x84\x8a\ -\x08\xf1\xf8\x11\xe7\x65\xd7\x93\xc4\x8f\x38\xb6\x7a\xf9\xce\x8c\ -\xc7\x8d\x5d\xdb\x6d\x61\xd1\x05\xb9\xd2\xa7\xf1\x3c\xce\x7c\xdf\ -\x39\xf7\xde\x73\xce\x98\xc8\x46\x12\xb5\xb7\x13\x8e\xed\x74\xec\ -\x30\xd1\x4b\x44\xd4\xd6\x66\x9e\x7b\x64\xa2\xf7\x70\xed\x18\xae\ -\xe1\x11\xea\x20\xf3\xba\x31\xda\xe8\x60\x1c\x8c\x83\xd1\x62\x14\ -\x54\x6a\x2f\xcd\x51\x07\x63\x4f\xa5\xe3\xff\xf5\x7d\x99\x31\x6a\ -\xcf\x8c\xc9\x1d\x8c\xec\x44\xf3\xf7\x31\x57\x69\x96\x42\x80\xa8\ -\x43\xf8\xdf\xe8\x58\xf5\xbb\xda\x37\x86\xe4\x50\xfa\x92\x5d\xdc\ -\x1d\xb1\x0b\xfd\x23\x49\xec\x4c\x48\x22\x1f\xb0\x69\x85\x80\x91\ -\x16\x6a\xb8\xf7\x82\xa4\x03\xa2\x14\x7c\x80\x9f\xa1\x97\xe7\xe8\ 
-\xe4\xa3\x72\x27\xfc\xca\xf1\xf5\x41\x59\xdf\x18\x94\xc5\xe6\x90\ -\x5c\xc3\x9f\x0b\xd8\x44\x7e\xca\x26\x10\xe7\xaa\x86\xa2\x6a\xf8\ -\x28\x98\x7f\xaf\x31\x3f\xeb\xd2\x1f\x25\x0e\x11\x3f\xb5\xad\xfa\ -\x15\x6d\x6d\x40\x11\xeb\x7e\x93\x3f\x33\x6a\xf1\xdb\x0c\xfe\xdd\ -\x19\x83\x47\xb3\x7c\xdf\x9d\xc6\x39\xf8\x8b\xa6\x06\xbd\x18\x24\ -\x75\x6f\x96\x3a\xeb\xe7\x03\xf7\xc2\x42\x6d\x9d\x45\x93\xfd\xf2\ -\x62\xb2\x5f\x11\x0c\xe6\x4f\x0d\xdb\x01\x79\x1e\x1a\x4e\x6e\x8f\ -\x4b\x61\xe6\x2f\x4c\x9b\x71\xe6\x98\xe2\xb7\xca\xfc\x0c\x83\x5f\ -\x25\x7f\xcd\xdc\xcc\xd1\x42\x8d\x06\x95\xe6\x9b\xc6\xbd\x57\xe9\ -\x4c\xf8\x1c\x82\xb1\x72\xd1\xd4\xb0\x31\xa4\x74\x5a\xf7\x33\xd0\ -\x0e\x7e\x9d\xfd\xaf\xc4\x54\xcd\x06\x28\x94\x0b\x10\xd6\x05\x89\ -\xc2\x14\x89\x7a\xff\xf8\x9c\xd7\xa0\xc5\xcf\xb6\xd9\x06\xf3\xc0\ -\x71\x8f\xf7\x39\xf4\x58\xaf\xc5\xef\x10\x49\xbf\xf2\x80\xd6\x5c\ -\x40\x5a\x28\x4c\xdb\x2c\x7f\x42\xcc\xbf\x33\x41\xc2\xd2\xd0\xc8\ -\x2f\x63\x6f\x04\xab\x73\x80\xf8\xd9\x42\xf5\xcf\xc4\xfa\x1c\x0b\ -\xcc\xcd\x88\x7a\x9d\x02\x5a\xb4\x46\xef\xca\x4d\x4b\x2a\xfb\xc0\ -\xb1\x66\xfe\x9d\x71\x03\x62\x1b\xe0\x63\xb3\xd8\x72\xac\x2c\x0d\ -\x46\x0c\xa6\xa8\x1a\xd7\xb8\xcf\xd5\xa1\x79\x9c\xc2\x00\xb8\x0d\ -\x7e\x5c\x6b\xc6\x9f\x87\xff\x16\xbf\x3e\x46\xa1\xcc\x08\xf8\xc7\ -\x48\xe8\xa3\xcd\xf9\x2b\x1a\xc2\x96\x86\xec\xa4\x54\xf5\x0f\xbc\ -\x4b\xcc\x1d\xe9\x31\x35\x44\x3c\xce\xa6\x6b\x64\x27\x00\x7e\xec\ -\xbf\xca\x3e\x0b\xa5\x47\x68\xfe\xee\x25\x12\xe9\x61\x93\x9f\xf3\ -\x55\x53\x7e\xec\xd9\x52\x25\x47\xb0\xfe\xad\x09\x49\x8d\xfb\x9c\ -\xdf\x44\xbd\x2e\xc3\xef\x0a\xbf\xce\x6b\xa1\x29\xff\xa4\x6d\x91\ -\xfd\xdf\x9d\x31\x34\x2c\x6c\xfa\xc9\x0f\x88\xd4\x20\x30\x04\xfe\ -\xe1\xd6\x79\x66\x17\xeb\xdf\xd2\x80\x7c\xb2\x15\xeb\x75\xde\x63\ -\x7e\x06\xfc\xc6\xfc\xbb\x3a\x5b\xd9\x67\x03\xb6\x30\x73\x1b\x6b\ -\x00\x73\x0a\xee\x8e\xb5\x7e\x12\xeb\x03\x24\xf8\x08\x1d\x4d\x63\ -\xc7\x83\xf7\x03\xd6\xa3\xc6\x73\xb0\x36\x20\x8b\xa8\xc7\x9c\x6f\ -\xe6\x5f\xf1\x39\x7f\x6f\x65\x6b\xec\xbf\x29\xd3\x77\x63\xfe\xe7\ -\xcc\x1c\xb8\x36\x40\x7a\xb2\x0f\xef\xbb\x48\x02\x47\xbd\xd5\x3b\ -\x78\xec\x20\x7f\x73\x2e\x8b\x7a\x1c\x15\x38\x0d\xac\xf8\x1c\x67\ -\x5a\xfa\x8e\x35\x6b\xc5\x9e\xf9\xad\xbd\x9e\xf0\xd2\x02\x20\x56\ -\x7a\x4d\xc4\x3c\xf7\xd7\x76\xa3\x01\x8d\x47\x63\x1e\x45\x44\x7b\ -\x94\xaa\x86\xa4\xcf\xf1\xd3\xc3\x74\xf3\xbe\xad\xfa\x1e\xa4\x45\ -\xeb\x7a\xdc\x43\x1d\x80\x60\x0d\x51\x37\x09\xfc\xd6\x22\x9d\xcd\ -\xd7\x90\xe6\xb6\x67\xa2\x3d\xb2\xc9\x0f\xac\xf8\x14\xb1\x3d\x21\ -\x8d\xb7\xe4\xc6\xda\xb5\xb8\x8d\x3c\x3b\x5b\xeb\x23\x7c\x0e\xc5\ -\x7a\x4c\xfe\x48\x97\x71\x5c\x68\xc8\xdd\x2d\x2d\x81\x5f\x68\x6e\ -\xd9\x00\xeb\xe0\xb9\xe0\x9c\xd6\x28\x2f\x5a\x83\x6b\x5c\xf1\x7e\ -\x8d\xd1\xea\xef\x47\xba\xe9\xb8\xd6\x6d\x72\xef\xc3\x62\xc4\x63\ -\xee\x47\xfc\x3e\x03\xee\x0c\x00\x5e\x86\xdd\xc0\xfa\x80\x74\x27\ -\x3b\x69\xe6\x6e\xec\x8d\x86\x35\x92\xf7\xd9\x3e\x6e\x51\x52\x6b\ -\xeb\x7f\x55\x43\x17\xa9\x75\xfc\x06\x34\x0b\xdd\xb6\x0a\x24\x03\ -\x31\x8f\xb4\xc4\x76\xc8\x9b\xe1\xfc\x94\x99\x17\xc1\xa1\x03\xaa\ -\xd1\x3f\x21\xc6\x80\x66\xd5\xf5\x4a\x7f\xd1\x72\x7f\x81\x6f\xa1\ -\x29\xbf\x01\x53\x43\xd4\x6d\x5b\xb2\x6c\x32\xc8\x37\xb9\x49\x0a\ -\x1b\x75\xbc\x59\xff\xc0\xb5\x63\xae\xf1\x9c\x36\xd0\xe0\x07\xf4\ -\xc6\xfc\x74\x0f\xbe\x5f\xaf\xb7\xe1\xbd\x5d\x9c\xae\xad\xd5\x75\ -\xfd\x8b\xfa\x28\xdc\x55\x0d\x58\xff\x15\x1d\x8b\xe0\x4c\x00\x4b\ -\xc0\xf5\xe5\x0b\x74\xb4\x95\x5d\xa5\x97\x9b\xaf\xf4\x2e\x21\xc4\ -\xdf\xcf\x3d\xed\xe3\x70\x1f\x8c\x83\xf1\x7f\x19\xfc\x3f\x09\x17\ -\x9a\x76\xda\xf7\x3f\x89\xfc\xc4\xe4\x3c\x74\xa0\x66\x4b\x8c\xc7\ 
-\x7d\x4e\x5c\x25\x05\x39\xf9\x1d\xe4\x84\xcb\xe5\x20\x7d\x0a\xbc\ -\x8b\xfb\x8e\x07\xec\x4e\x91\x1d\xdf\x6e\x6f\xa7\x2f\xc9\x97\xb7\ -\xc6\xa4\x2b\xf9\x49\x7a\x9f\x9f\x43\xaf\x74\x0a\x48\xc2\xbe\x0c\ -\x94\x80\x14\x72\x4b\x57\xbd\x96\xd5\x7e\xc7\x69\x7c\xf7\xac\xe1\ -\xbb\xb3\x8c\xef\x9d\x3d\xf4\x9a\x29\xe4\xa3\x0f\x91\x9b\xbf\x2d\ -\xce\x50\x11\xb5\x21\x58\x9e\xa5\x0f\x60\x7f\x07\xf6\x51\x1c\xdf\ -\xb0\x6c\xe3\x3e\xc7\xcb\xe8\x61\xfe\x58\x1d\x50\xf2\x9b\xc3\x72\ -\x70\x7b\xdc\xde\x8d\xba\x9a\xc6\x33\xdf\xe7\x26\x28\x8e\xef\x83\ -\x24\x6a\xdc\xf3\xfc\x2c\xf4\x9f\xc2\xf5\x2d\xbc\xef\x6b\x31\x4a\ -\x4f\x23\x5f\xba\xf0\x0d\xf0\x99\xf9\xfd\xe1\xbc\xc2\xe7\x69\x95\ -\x0e\xc1\xfe\x3b\xe4\xde\x15\xf4\xd9\x09\xf4\xd8\x89\xfc\x28\x3d\ -\x57\x89\x8f\x83\x6b\x11\xb4\x65\xf1\x0d\x76\x0e\x3d\xcc\x69\xf4\ -\xb0\x59\xe0\x76\xa2\xcf\xc9\x7f\xa9\xd1\x86\x4a\x4f\xc1\xfe\x06\ -\x38\x92\xe9\x21\xfa\x19\x3d\x72\x4a\x1f\xa1\x13\x96\xde\xdd\x39\ -\x7a\x91\x7b\xf2\xec\x94\xed\x4f\xf4\xa1\xcb\xe8\x7d\x77\x35\x8f\ -\xeb\xbc\x75\x7f\x5b\xa5\x23\xe8\xb7\x6e\x71\xed\x47\x6f\x79\x15\ -\xfd\x5d\x79\xdd\x47\x9e\xfd\xf1\xc2\xbd\xb3\x88\x55\x8e\x7b\xc8\ -\x98\xd7\x79\x33\xea\xa5\xc3\xd6\x3d\xd4\xb4\xd7\xc0\x9f\xc2\x33\ -\x9f\x27\xbd\x74\x32\xee\xa5\x2d\xf4\x46\x3f\x2c\x77\xd1\xb3\xd5\ -\x98\xb9\xed\x67\xd1\x8b\xe5\xe2\xbd\x4a\x69\x7d\x50\x71\xef\x9f\ -\xff\xc2\x8c\x34\x07\xdb\x6d\xcc\xdb\xb9\xbf\x2e\xd2\x21\xf4\x34\ -\x5f\xa1\xc7\x28\xa3\xa6\x5c\x8e\xb8\xe9\x18\x7a\x8e\xce\xa8\x5b\ -\x8a\xa1\x8f\x28\xa5\x86\xa4\x0c\x7a\x88\x9b\xf0\xe9\x05\xd8\xca\ -\xf0\xeb\x2d\xd8\xc6\x60\x7b\x03\xe7\xcf\xf0\x3b\x35\x37\x9d\x80\ -\xed\x6d\xa0\x8c\x5a\xb4\x09\x14\x50\x8b\xf7\xd0\x8f\x5c\x43\x7c\ -\xcf\x20\x8e\x7f\x23\x9e\xbf\xa2\xce\x7e\x89\xb9\x59\xc5\xef\xdf\ -\xf6\x3e\xa6\xd7\xf7\xfb\xbb\x7c\x9e\x5e\x81\x86\x4f\x60\x7b\x1b\ -\xf8\x91\x6b\x1c\xfb\x2c\x04\xd9\x50\xa7\xde\x44\xfd\xfa\x02\xdc\ -\xbf\x60\x8d\x5c\xc3\xfa\x78\xb5\xd1\xda\xbe\xe5\x26\x25\xe9\xa6\ -\x23\xf1\x0b\xf4\x8c\xe0\x6d\x5b\xbb\xee\x5d\xac\xb7\xd1\xda\x7e\ -\x92\xe3\x1f\x0b\x0c\xb3\x17\ \x00\x00\x02\x7e\ \x3c\ \x3f\x78\x6d\x6c\x20\x76\x65\x72\x73\x69\x6f\x6e\x3d\x22\x31\x2e\ @@ -422,26 +51,7 @@ qt_resource_data = b"\ \x65\x63\x61\x70\x3d\x22\x73\x71\x75\x61\x72\x65\x22\x20\x73\x74\ \x72\x6f\x6b\x65\x2d\x6c\x69\x6e\x65\x6a\x6f\x69\x6e\x3d\x22\x6d\ \x69\x74\x65\x72\x22\x2f\x3e\x3c\x2f\x73\x76\x67\x3e\ -\x00\x00\x01\x10\ -\x00\ -\x00\x04\x42\x78\x9c\xc5\x93\x51\x6e\x83\x30\x0c\x86\xaf\x62\x79\ -\xcf\x25\x29\xa0\x0a\x4d\x84\x4a\x7d\xe8\x09\xb6\x03\x54\x34\x40\ -\x36\x9a\x74\x21\x23\x70\xfb\x39\x1b\x74\xe3\x71\x5a\xc5\xa4\x48\ -\xb1\xad\xe8\xfb\x13\xfb\x4f\xbe\x1f\x2e\x2d\xf4\xd2\x76\xca\x68\ -\x81\xdb\x88\x23\x48\x5d\x9a\xb3\xd2\xb5\xc0\xe7\xa7\xe3\x26\xc3\ -\x7d\x91\x77\x7d\x0d\x5e\x9d\x5d\x23\x30\x89\x11\x1a\xa9\xea\xc6\ -\x7d\xc5\xbd\x92\xfe\x60\x06\x81\x1c\x38\xa4\x19\x2d\x84\x4a\xb5\ -\xad\x40\x6d\xb4\x44\x20\xbe\xee\x04\x36\xce\x5d\x1f\x19\xf3\xde\ -\x47\x3e\x89\x8c\xad\x59\xcc\x39\x67\x04\xc6\x22\x6f\x95\x96\x30\ -\x6c\x49\x3f\x45\x18\x69\x27\xc6\x10\x13\x3f\xa4\xf1\x67\xda\x39\ -\x6b\x5e\xa5\xc0\x07\xce\x77\x69\x55\xcd\x85\xcd\x7c\xab\x5b\x21\ -\xb0\xca\xd3\x55\x60\xf7\xf6\x7e\xb2\x12\xd9\xfa\xf8\x94\x2f\xf9\ -\x21\xff\x8b\x80\x95\xa5\x03\xea\x70\xb2\x23\x1e\xe1\x70\x9e\x45\ -\xf6\x3d\x0a\x0a\x2d\x1d\x89\x97\xcd\xff\x95\xea\x8b\x51\x64\x81\ -\x8b\x72\xd2\xfe\x54\x4d\xff\x43\x74\x7a\x6a\xd8\xd6\x7f\xeb\x5a\ -\xaa\x37\xe3\x04\x83\x8c\x93\x81\x86\xc9\x30\xe3\x64\xa0\xbb\x38\ -\x33\x5b\xf2\xb3\xfb\xe0\xc3\xef\x2d\x3e\x00\x2f\xe9\x4b\x1d\ -\x00\x00\x03\x43\ 
+\x00\x00\x02\x4e\ \x3c\ \x3f\x78\x6d\x6c\x20\x76\x65\x72\x73\x69\x6f\x6e\x3d\x22\x31\x2e\ \x30\x22\x20\x65\x6e\x63\x6f\x64\x69\x6e\x67\x3d\x22\x55\x54\x46\ @@ -451,336 +61,35 @@ qt_resource_data = b"\ \x20\x34\x38\x22\x20\x66\x69\x6c\x6c\x3d\x22\x6e\x6f\x6e\x65\x22\ \x20\x78\x6d\x6c\x6e\x73\x3d\x22\x68\x74\x74\x70\x3a\x2f\x2f\x77\ \x77\x77\x2e\x77\x33\x2e\x6f\x72\x67\x2f\x32\x30\x30\x30\x2f\x73\ -\x76\x67\x22\x3e\x3c\x70\x61\x74\x68\x20\x66\x69\x6c\x6c\x2d\x72\ -\x75\x6c\x65\x3d\x22\x65\x76\x65\x6e\x6f\x64\x64\x22\x20\x63\x6c\ -\x69\x70\x2d\x72\x75\x6c\x65\x3d\x22\x65\x76\x65\x6e\x6f\x64\x64\ -\x22\x20\x64\x3d\x22\x4d\x35\x20\x31\x30\x43\x35\x20\x38\x2e\x38\ -\x39\x35\x34\x33\x20\x35\x2e\x38\x39\x35\x34\x33\x20\x38\x20\x37\ -\x20\x38\x4c\x34\x31\x20\x38\x43\x34\x32\x2e\x31\x30\x34\x36\x20\ -\x38\x20\x34\x33\x20\x38\x2e\x38\x39\x35\x34\x33\x20\x34\x33\x20\ -\x31\x30\x56\x33\x38\x43\x34\x33\x20\x33\x39\x2e\x31\x30\x34\x36\ -\x20\x34\x32\x2e\x31\x30\x34\x36\x20\x34\x30\x20\x34\x31\x20\x34\ -\x30\x48\x37\x43\x35\x2e\x38\x39\x35\x34\x33\x20\x34\x30\x20\x35\ -\x20\x33\x39\x2e\x31\x30\x34\x36\x20\x35\x20\x33\x38\x56\x31\x30\ -\x5a\x22\x20\x73\x74\x72\x6f\x6b\x65\x3d\x22\x23\x30\x30\x36\x34\ -\x66\x66\x22\x20\x73\x74\x72\x6f\x6b\x65\x2d\x77\x69\x64\x74\x68\ -\x3d\x22\x33\x22\x20\x73\x74\x72\x6f\x6b\x65\x2d\x6c\x69\x6e\x65\ -\x63\x61\x70\x3d\x22\x73\x71\x75\x61\x72\x65\x22\x20\x73\x74\x72\ -\x6f\x6b\x65\x2d\x6c\x69\x6e\x65\x6a\x6f\x69\x6e\x3d\x22\x6d\x69\ -\x74\x65\x72\x22\x2f\x3e\x3c\x70\x61\x74\x68\x20\x66\x69\x6c\x6c\ -\x2d\x72\x75\x6c\x65\x3d\x22\x65\x76\x65\x6e\x6f\x64\x64\x22\x20\ -\x63\x6c\x69\x70\x2d\x72\x75\x6c\x65\x3d\x22\x65\x76\x65\x6e\x6f\ -\x64\x64\x22\x20\x64\x3d\x22\x4d\x31\x34\x2e\x35\x20\x31\x38\x43\ -\x31\x35\x2e\x33\x32\x38\x34\x20\x31\x38\x20\x31\x36\x20\x31\x37\ -\x2e\x33\x32\x38\x34\x20\x31\x36\x20\x31\x36\x2e\x35\x43\x31\x36\ -\x20\x31\x35\x2e\x36\x37\x31\x36\x20\x31\x35\x2e\x33\x32\x38\x34\ -\x20\x31\x35\x20\x31\x34\x2e\x35\x20\x31\x35\x43\x31\x33\x2e\x36\ -\x37\x31\x36\x20\x31\x35\x20\x31\x33\x20\x31\x35\x2e\x36\x37\x31\ -\x36\x20\x31\x33\x20\x31\x36\x2e\x35\x43\x31\x33\x20\x31\x37\x2e\ -\x33\x32\x38\x34\x20\x31\x33\x2e\x36\x37\x31\x36\x20\x31\x38\x20\ -\x31\x34\x2e\x35\x20\x31\x38\x5a\x22\x20\x73\x74\x72\x6f\x6b\x65\ -\x3d\x22\x23\x30\x30\x36\x34\x66\x66\x22\x20\x73\x74\x72\x6f\x6b\ -\x65\x2d\x77\x69\x64\x74\x68\x3d\x22\x33\x22\x20\x73\x74\x72\x6f\ -\x6b\x65\x2d\x6c\x69\x6e\x65\x63\x61\x70\x3d\x22\x73\x71\x75\x61\ -\x72\x65\x22\x20\x73\x74\x72\x6f\x6b\x65\x2d\x6c\x69\x6e\x65\x6a\ -\x6f\x69\x6e\x3d\x22\x6d\x69\x74\x65\x72\x22\x2f\x3e\x3c\x70\x61\ -\x74\x68\x20\x64\x3d\x22\x4d\x31\x35\x20\x32\x34\x4c\x32\x30\x20\ -\x32\x38\x4c\x32\x36\x20\x32\x31\x4c\x34\x33\x20\x33\x34\x56\x33\ -\x38\x43\x34\x33\x20\x33\x39\x2e\x31\x30\x34\x36\x20\x34\x32\x2e\ -\x31\x30\x34\x36\x20\x34\x30\x20\x34\x31\x20\x34\x30\x48\x37\x43\ -\x35\x2e\x38\x39\x35\x34\x33\x20\x34\x30\x20\x35\x20\x33\x39\x2e\ -\x31\x30\x34\x36\x20\x35\x20\x33\x38\x56\x33\x34\x4c\x31\x35\x20\ -\x32\x34\x5a\x22\x20\x66\x69\x6c\x6c\x3d\x22\x6e\x6f\x6e\x65\x22\ -\x20\x73\x74\x72\x6f\x6b\x65\x3d\x22\x23\x30\x30\x36\x34\x66\x66\ -\x22\x20\x73\x74\x72\x6f\x6b\x65\x2d\x77\x69\x64\x74\x68\x3d\x22\ -\x33\x22\x20\x73\x74\x72\x6f\x6b\x65\x2d\x6c\x69\x6e\x65\x6a\x6f\ -\x69\x6e\x3d\x22\x6d\x69\x74\x65\x72\x22\x2f\x3e\x3c\x2f\x73\x76\ -\x67\x3e\ -\x00\x00\x07\x34\ -\x89\ -\x50\x4e\x47\x0d\x0a\x1a\x0a\x00\x00\x00\x0d\x49\x48\x44\x52\x00\ -\x00\x00\x20\x00\x00\x00\x20\x08\x06\x00\x00\x00\x73\x7a\x7a\xf4\ 
-\x00\x00\x06\xfb\x49\x44\x41\x54\x78\x9c\xbd\x97\xd9\x6f\x54\xd7\ -\x1d\xc7\x3f\xbf\x73\x97\x99\xb9\xb3\xda\x63\x83\xed\x02\x9e\x00\ -\x06\x1c\x11\x50\x51\xa5\x56\x50\x4a\x89\xca\xd2\x54\x7d\x49\xab\ -\xfc\x01\x7d\x05\xe5\x21\xad\xd4\x4d\x91\x92\xb4\x59\xa4\xaa\x55\ -\x1f\x4b\x94\xb6\xa9\xaa\x44\xa9\xd4\x2a\x0f\x25\x65\xab\x4a\xa5\ -\xaa\x25\x11\x09\x38\x1b\x60\x3b\x08\x1c\xcc\x18\xef\x9e\x31\xe3\ -\x99\xb9\xcb\x39\x7d\x98\xb1\x59\x6c\xc0\xa6\x52\xcf\xc3\x79\xb8\ -\xcb\xf9\x7d\xee\xef\xf7\xfd\x2d\x57\x8a\xc5\xa2\xe1\x21\x97\x34\ -\x37\xf3\xd0\x27\x80\xfa\x5f\x8c\x6b\x84\x20\x02\x25\xff\x67\x00\ -\x91\xc6\x76\xb4\x7f\x8a\x23\x67\x8b\x94\x7d\x83\xf5\x90\x14\x2b\ -\x06\x10\x01\x41\x38\x36\x38\xcd\xf1\x81\x51\x3e\x1a\x99\xe1\x8d\ -\xbe\x1b\x94\xeb\xfa\xa1\x20\x56\x04\x20\xcd\xfd\x9d\xc1\x69\x8e\ -\x5d\x1a\xc3\xb3\x2d\x32\x71\x97\x8b\x63\xb3\xbc\xf1\xe1\x28\xb3\ -\xbe\x5e\x71\x38\x96\x0d\x20\x02\x22\xc2\xdf\x9a\xc6\x6d\x4b\x9a\ -\x02\x34\x58\x22\x7c\x72\xa3\xcc\x1f\xfb\x46\xb9\x19\xac\x2c\x1c\ -\xf6\x72\x8d\x1b\x23\xd4\x35\x04\xda\x60\xa9\x79\x6f\x40\x2a\xe6\ -\x90\xf3\x62\x94\xab\x3e\xb5\x50\x13\x68\xa1\x1e\x1a\x62\xb6\x10\ -\xe9\x07\xa7\xc7\x03\x01\x54\x53\x70\xaf\x9d\x1d\xe6\x7a\xb9\xc6\ -\xf7\xbf\xb6\x81\xb4\xeb\x70\xae\x58\x26\xef\xc5\xf8\x42\x2e\xc1\ -\xae\x42\x0b\x7e\x18\x62\x99\x88\xb7\xce\x5f\xa3\xaf\x58\xe2\xa5\ -\x83\x5b\xc8\x7b\x0e\xe1\x03\x20\xee\x0b\xa0\x9a\x6e\x7f\xfd\x83\ -\x22\x6f\x9e\xbb\x86\x12\x21\xe9\xda\x1c\xde\x59\x60\x4b\x47\x8e\ -\x44\xcc\xa1\xd5\x0e\x41\xfb\x88\x6b\xd0\x58\x0c\x95\x7c\xfe\xfa\ -\x71\x91\x30\xd2\xbc\xf4\xcd\x5e\x56\x25\x1d\xa2\xfb\x30\xdc\x53\ -\x03\xf3\xc6\x7f\xff\x7e\x91\xb7\xfa\xae\xd3\x99\x49\xe0\x58\xc2\ -\xb1\x8b\x23\xfc\xfa\x5f\x57\x68\x75\x23\xda\x9c\x00\x1d\x05\x68\ -\x63\x88\x34\x58\x68\x9e\xde\xbd\x91\x6d\x6b\xf2\xbc\x37\x34\xcd\ -\xd0\x4c\x1d\xc4\x5a\xb9\x07\xe6\xd5\xfe\x87\x73\x45\x5e\x3f\x3b\ -\xc4\xea\x74\x1c\x43\x43\x70\x18\x38\x7e\x69\x04\x2f\x66\x73\xe8\ -\x2b\x6b\xb1\x00\xd3\x7c\x27\xd2\x86\x42\xab\xc7\xe1\x3d\x5b\x30\ -\x61\x95\xad\x5d\x39\x2e\xcc\x68\xda\xe3\x8a\xb6\x98\x66\xa9\x68\ -\x2c\x02\x10\xc0\xb2\x14\x6f\xf6\xdd\xe0\xd5\x33\x57\xc9\x26\x6c\ -\x5c\xdb\x62\x6c\x76\x0e\x03\x28\x05\xf5\x40\x33\x5b\x8b\x40\x14\ -\x98\x08\x03\xd4\xb5\x10\xb7\xa0\x5a\xad\xf2\xad\x9e\x2c\x35\x9d\ -\xe5\x72\xc9\x50\xf1\x35\x73\x01\x28\x51\xb4\xb8\x7a\x51\xd9\x56\ -\x77\x1b\x17\x11\x8c\x81\xae\x6c\x9c\x8e\x6c\x02\x85\x30\x71\xb3\ -\x06\x40\xd2\x75\xf0\x43\xc3\xd6\x8e\x1c\xdf\xfb\x52\x17\x0e\x1a\ -\x03\x58\x4a\xf1\xf6\xa7\xa3\xfc\xee\x83\x22\x4a\x84\x5a\x64\xb8\ -\x5c\xd2\x54\x83\x08\xab\x79\xde\xd5\xb2\x66\xda\x57\xdc\x9d\xa0\ -\x0b\x00\x4a\x1a\x07\x1d\x79\xef\x73\x5e\x39\x7d\x99\x3d\x85\x16\ -\x7e\xb4\x77\x13\xe9\x84\x4b\xc5\x0f\x50\x4a\xa8\x06\x11\x8f\x75\ -\xb5\xf0\xec\xbe\xcd\xac\x6f\x4b\xa3\x9b\xef\x5a\x96\xc5\x81\x9e\ -\x3c\x8f\xad\x4a\x12\x18\x61\x2a\x70\x88\xf4\xad\x60\xaa\xdb\x21\ -\x02\xd5\x28\xe5\xb7\x87\x40\x04\x94\x52\xbc\xfa\xee\x10\xbf\x3c\ -\x3d\x88\xa5\x04\x2f\x66\xf3\xcc\xee\x02\x87\x76\x16\x78\xf9\x1f\ -\x03\xcc\xd6\x02\x1e\xed\xc8\xf1\xd3\xc7\x37\xd0\xdd\x12\x27\x52\ -\x36\x50\x07\xc0\x88\xa2\xb3\x25\xcd\xea\x94\x8b\x1f\x69\xf2\x09\ -\x8b\x50\xbb\x8c\xce\xd6\x40\x64\xc1\xb3\xda\x18\xae\x96\x35\x64\ -\x14\x2d\x4e\xc3\x7b\xf6\xbc\xe0\x7e\x73\x66\x88\x5f\x9c\x1e\xc0\ -\xb1\x14\x49\xd7\xe6\x4f\xe7\xae\xa1\xb5\xe1\x99\xdd\x05\x82\x3d\ -\x3d\x1c\xef\x9f\xe0\xd0\xce\x75\xcc\xd4\x0d\x93\xc5\x0a\xdb\x3a\ -\x74\x43\x94\x40\xe8\xd7\x51\xb1\x38\xa1\x06\x8c\xc6\x26\xc4\x20\ -\x8b\xca\xb2\x6a\x42\x5c\x29\x6b\xac\xac\x22\x63\xeb\x86\x07\x42\ 
-\x6d\x18\x98\x98\x23\xd4\x86\xa4\xab\xc8\xc6\x5d\x26\x2b\x35\x7e\ -\xfb\xee\x15\x8c\x31\x1c\xde\x55\x60\x57\x77\x8e\xfe\xc9\x3a\xa7\ -\x06\xa7\x30\x18\xe2\x4e\x3b\x9b\x5a\x9d\x85\x6a\xe7\xd7\x6b\x0b\ -\x4a\x72\x88\x50\x34\xb2\x43\x35\x23\x31\x2f\x3e\x11\xc1\xb3\x15\ -\x8e\x6a\x5c\x50\x06\x70\x94\xf0\xdc\xbe\x1e\xbe\xb3\x7d\x2d\xf5\ -\x50\xe3\x47\x11\x31\xa7\x91\x60\xe7\x8b\x65\x6e\xfa\x9a\xc1\xa9\ -\x1a\x7f\xff\x6c\x8a\x48\x6b\xea\x41\xc4\x85\xf1\x1a\xb6\x1b\xbf\ -\x23\x9e\x8e\x12\xae\x94\x7c\x5e\x3b\x3b\x4c\xce\x0e\xe9\x4a\x3b\ -\x0b\xf7\x45\x40\x1b\xf0\x1c\x45\x21\x6d\x48\x28\x7d\x0b\x50\x1b\ -\x43\x26\x66\xf1\xfc\xfe\x4d\x3c\xf5\xc5\xb5\x44\x06\x6e\xd6\x02\ -\x7a\x57\xe7\x78\x6e\xdf\x66\x6e\x54\x42\x4e\x0e\x4e\xe3\x47\x51\ -\x43\x74\x4a\x18\x9e\xa9\x70\xbd\x02\xb6\x13\x6f\xe8\x00\x98\xac\ -\x43\x4d\x5b\x1c\xbb\x38\xca\xf3\x27\x2e\xd1\xea\x44\x74\xa4\x6d\ -\x84\x86\x07\x92\x6e\xc3\x78\xbc\x69\x7c\x41\x84\xf3\x61\x48\xb9\ -\x8a\x1f\x3f\xbe\x11\x5f\x1b\x06\xc6\x66\x79\xf1\x89\x5e\x02\x23\ -\x9c\x18\x98\x24\x08\x23\x94\x08\x8e\xa5\xb0\x2d\xa1\x54\xf5\x39\ -\xfa\xf1\x30\x4f\x6e\x5f\xc3\x2a\xc7\x22\x34\x50\x0d\x2c\xde\xbf\ -\x36\x41\xa9\x5a\xe7\xed\xcf\x27\xc8\x25\x5c\x7e\xb0\x77\x03\x26\ -\x65\x53\xaa\x6b\xba\x53\x86\xb8\xdc\x32\x7e\x07\x00\x34\x2a\x59\ -\xc2\x16\x7e\xb8\x67\x3d\x73\x81\xa6\xec\xc3\xf1\x81\x49\xfc\xa6\ -\x71\x03\x44\xc6\xb0\x2e\x9b\xe4\x91\x7c\x8a\x52\xd5\x27\xf4\xab\ -\x9c\x1c\x9a\x61\x6d\x4b\x8a\xf6\x5c\x0a\xad\x35\x63\xe5\x2a\x71\ -\x47\x71\xf4\xc2\x08\x22\xf0\xf4\x57\xd7\xd3\x9e\xb1\xb0\xcc\x9d\ -\xc6\x17\x01\xcc\x43\xb4\xc4\x6d\x5a\x12\xc2\xa7\x97\x67\xa9\x06\ -\x11\xb6\x9a\xcf\x15\xb0\x95\x50\xc8\x67\xd8\xd1\x95\x24\x0a\xaa\ -\x68\xad\x39\xd1\x3f\x4e\xdf\xf5\x41\xbe\xde\xb3\x9a\x81\xb1\x32\ -\x91\x36\x64\xe2\x2e\x4a\x09\xe7\x86\x4b\x94\x6b\x21\x19\x57\x08\ -\x97\x53\x8a\x69\x7e\xa5\x60\xd8\xdd\x9d\xa4\x1a\x44\xf4\x8f\x57\ -\x40\x1a\x7d\x20\xe9\x3a\x24\x5d\x81\xa8\x86\x32\x11\x96\x52\xbc\ -\xb0\x7f\x13\xe7\x47\x2a\x1c\x39\x73\x85\x7f\x5f\x1e\x43\x04\x82\ -\xc8\xb0\x2e\x9b\xe0\x67\x07\x7b\x59\x93\xb9\x77\x5b\xbe\x67\x37\ -\x34\x06\xe2\x16\x1c\xd8\x98\x61\x6b\x47\x9a\x50\x1b\x82\xc8\x90\ -\x8a\xd9\x74\xa7\x05\xc7\xb2\x40\x2c\x22\xad\xb1\xc5\xb0\x6b\x5d\ -\x8a\x23\x4f\xed\xe0\x57\xdf\xfd\x32\x31\xdb\x22\x1b\xb7\x79\xe1\ -\xc0\x16\xb6\x75\x78\xf7\x9d\x09\xee\x3b\x0f\x68\x03\x31\x0b\xf6\ -\x16\x52\x54\x83\x90\xa9\x6a\xc8\xbe\x8d\x39\xfe\xf9\xd9\x38\x99\ -\xa4\xc7\xfe\x8d\x79\xca\x95\x4a\xa3\x19\x85\x1a\x5b\x05\x3c\xb9\ -\x75\x35\x9e\xb5\x83\xd6\xb8\x62\x7b\x47\x8a\x5a\x10\xa1\x94\x5a\ -\x28\x5a\x77\x2f\x59\xce\x8f\x89\x12\x98\x0b\x0d\xbe\x16\xfe\x73\ -\x75\x8a\x9f\xbc\xf3\x09\x9e\x63\xf1\xca\xb7\xb7\xb1\xf7\x91\x2c\ -\x41\xa4\x17\x9e\x15\xc0\xb6\x14\x88\x30\x3a\x36\xce\xe4\xc4\x24\ -\x6d\xed\x6d\xe4\xdb\xf2\x60\x40\x6b\x8d\x8e\x34\x06\x83\x6d\xdb\ -\xcb\x9b\x09\xb5\x01\xcf\x16\x62\x46\x38\x35\x30\x4e\xa9\xea\xe3\ -\x87\x8a\x9f\x9f\xbc\x88\x1c\x78\x94\x3d\x85\x0c\x61\x13\xc2\x00\ -\x41\xa4\x51\x4a\x51\xaf\xd5\x71\x5d\x97\x99\xe9\x19\x6c\xdb\x26\ -\x0c\x43\x8c\x36\x4c\x4f\x4f\x03\xb0\xae\x7b\xdd\xf2\xa7\x62\x6d\ -\x1a\x8d\xe3\xd9\x6f\xf4\x70\xb0\xb7\x13\x3f\xd4\x0c\x4d\xcd\xf2\ -\x97\x8f\x46\xb8\x51\x65\xc9\x49\x58\x6b\x4d\x18\x86\x64\xb3\x59\ -\xca\xa5\x32\xa5\x52\x89\x89\xf1\x09\x8c\x31\x04\x41\xc0\x40\xff\ -\xc0\xca\xfe\x0b\x22\x63\x68\x4b\x58\xbc\xfc\x44\x2f\xfb\xb6\x74\ -\x11\x6a\xc3\xa9\x8b\xc3\xfc\xf9\xc3\x22\xb2\xc4\xe8\xe5\xba\x2e\ -\x9d\x5d\x9d\x78\x9e\xb7\x00\x64\x3b\x36\x9e\xe7\xe1\x79\x1e\x8e\ -\xe3\x2c\x2f\x04\xb7\xaf\x50\x1b\xf2\x71\x8b\x17\x0f\x6e\x26\xd4\ 
-\x9a\xfe\xb1\x59\x76\x74\x7a\x18\x13\x2d\xfa\xfa\xd6\x7c\x2b\x4a\ -\x29\x2c\xcb\x22\x0c\x43\xfc\xc0\x27\x9d\x4e\x23\xb7\x35\x90\x65\ -\x89\x70\xa9\x65\x2b\x61\xac\x12\x32\x55\x0d\xe8\x6d\x4f\x10\x2c\ -\x31\xfa\x8a\x08\x41\x10\x30\x37\x37\x47\x32\x99\xc4\x71\x9c\x45\ -\xd9\xf0\xd0\x00\x00\x96\x08\x22\x2c\x99\xe7\x22\xc2\xdc\xdc\x1c\ -\xc5\xeb\x45\x5c\xd7\xc5\x75\x5d\x3a\x3a\x3b\x16\x01\xfc\x17\x8c\ -\x9f\x4a\xdc\x4b\xe3\x48\xf3\x00\x00\x00\x00\x49\x45\x4e\x44\xae\ -\x42\x60\x82\ -\x00\x00\x05\xa3\ -\x3c\ -\x3f\x78\x6d\x6c\x20\x76\x65\x72\x73\x69\x6f\x6e\x3d\x22\x31\x2e\ -\x30\x22\x20\x65\x6e\x63\x6f\x64\x69\x6e\x67\x3d\x22\x55\x54\x46\ -\x2d\x38\x22\x3f\x3e\x3c\x73\x76\x67\x20\x77\x69\x64\x74\x68\x3d\ -\x22\x33\x32\x22\x20\x68\x65\x69\x67\x68\x74\x3d\x22\x33\x32\x22\ -\x20\x76\x69\x65\x77\x42\x6f\x78\x3d\x22\x30\x20\x30\x20\x34\x38\ -\x20\x34\x38\x22\x20\x66\x69\x6c\x6c\x3d\x22\x6e\x6f\x6e\x65\x22\ -\x20\x78\x6d\x6c\x6e\x73\x3d\x22\x68\x74\x74\x70\x3a\x2f\x2f\x77\ -\x77\x77\x2e\x77\x33\x2e\x6f\x72\x67\x2f\x32\x30\x30\x30\x2f\x73\ -\x76\x67\x22\x3e\x3c\x70\x61\x74\x68\x20\x64\x3d\x22\x4d\x31\x38\ -\x2e\x32\x38\x33\x38\x20\x34\x33\x2e\x31\x37\x31\x33\x43\x31\x34\ -\x2e\x39\x33\x32\x37\x20\x34\x32\x2e\x31\x37\x33\x36\x20\x31\x31\ -\x2e\x39\x34\x39\x38\x20\x34\x30\x2e\x33\x32\x31\x33\x20\x39\x2e\ -\x35\x38\x37\x38\x37\x20\x33\x37\x2e\x38\x36\x37\x43\x31\x30\x2e\ -\x34\x36\x39\x20\x33\x36\x2e\x38\x32\x32\x37\x20\x31\x31\x20\x33\ -\x35\x2e\x34\x37\x33\x34\x20\x31\x31\x20\x33\x34\x2e\x30\x30\x30\ -\x31\x43\x31\x31\x20\x33\x30\x2e\x36\x38\x36\x34\x20\x38\x2e\x33\ -\x31\x33\x37\x31\x20\x32\x38\x2e\x30\x30\x30\x31\x20\x35\x20\x32\ -\x38\x2e\x30\x30\x30\x31\x43\x34\x2e\x37\x39\x39\x35\x35\x20\x32\ -\x38\x2e\x30\x30\x30\x31\x20\x34\x2e\x36\x30\x31\x33\x39\x20\x32\ -\x38\x2e\x30\x31\x20\x34\x2e\x34\x30\x35\x39\x39\x20\x32\x38\x2e\ -\x30\x32\x39\x32\x43\x34\x2e\x31\x33\x39\x37\x39\x20\x32\x36\x2e\ -\x37\x32\x37\x37\x20\x34\x20\x32\x35\x2e\x33\x38\x30\x33\x20\x34\ -\x20\x32\x34\x2e\x30\x30\x30\x31\x43\x34\x20\x32\x31\x2e\x39\x30\ -\x39\x35\x20\x34\x2e\x33\x32\x30\x37\x37\x20\x31\x39\x2e\x38\x39\ -\x33\x38\x20\x34\x2e\x39\x31\x35\x37\x39\x20\x31\x37\x2e\x39\x39\ -\x39\x35\x43\x34\x2e\x39\x34\x33\x38\x31\x20\x31\x37\x2e\x39\x39\ -\x39\x39\x20\x34\x2e\x39\x37\x31\x38\x38\x20\x31\x38\x2e\x30\x30\ -\x30\x31\x20\x35\x20\x31\x38\x2e\x30\x30\x30\x31\x43\x38\x2e\x33\ -\x31\x33\x37\x31\x20\x31\x38\x2e\x30\x30\x30\x31\x20\x31\x31\x20\ -\x31\x35\x2e\x33\x31\x33\x38\x20\x31\x31\x20\x31\x32\x2e\x30\x30\ -\x30\x31\x43\x31\x31\x20\x31\x31\x2e\x30\x34\x38\x38\x20\x31\x30\ -\x2e\x37\x37\x38\x36\x20\x31\x30\x2e\x31\x34\x39\x33\x20\x31\x30\ -\x2e\x33\x38\x34\x36\x20\x39\x2e\x33\x35\x30\x31\x31\x43\x31\x32\ -\x2e\x36\x39\x37\x35\x20\x37\x2e\x31\x39\x39\x35\x20\x31\x35\x2e\ -\x35\x32\x30\x35\x20\x35\x2e\x35\x39\x30\x30\x32\x20\x31\x38\x2e\ -\x36\x35\x32\x31\x20\x34\x2e\x37\x32\x33\x31\x34\x43\x31\x39\x2e\ -\x36\x34\x34\x34\x20\x36\x2e\x36\x36\x38\x31\x39\x20\x32\x31\x2e\ -\x36\x36\x36\x37\x20\x38\x2e\x30\x30\x30\x31\x33\x20\x32\x34\x20\ -\x38\x2e\x30\x30\x30\x31\x33\x43\x32\x36\x2e\x33\x33\x33\x33\x20\ -\x38\x2e\x30\x30\x30\x31\x33\x20\x32\x38\x2e\x33\x35\x35\x36\x20\ -\x36\x2e\x36\x36\x38\x31\x39\x20\x32\x39\x2e\x33\x34\x37\x39\x20\ -\x34\x2e\x37\x32\x33\x31\x34\x43\x33\x32\x2e\x34\x37\x39\x35\x20\ -\x35\x2e\x35\x39\x30\x30\x32\x20\x33\x35\x2e\x33\x30\x32\x35\x20\ -\x37\x2e\x31\x39\x39\x35\x20\x33\x37\x2e\x36\x31\x35\x34\x20\x39\ -\x2e\x33\x35\x30\x31\x31\x43\x33\x37\x2e\x32\x32\x31\x34\x20\x31\ 
-\x30\x2e\x31\x34\x39\x33\x20\x33\x37\x20\x31\x31\x2e\x30\x34\x38\ -\x38\x20\x33\x37\x20\x31\x32\x2e\x30\x30\x30\x31\x43\x33\x37\x20\ -\x31\x35\x2e\x33\x31\x33\x38\x20\x33\x39\x2e\x36\x38\x36\x33\x20\ -\x31\x38\x2e\x30\x30\x30\x31\x20\x34\x33\x20\x31\x38\x2e\x30\x30\ -\x30\x31\x43\x34\x33\x2e\x30\x32\x38\x31\x20\x31\x38\x2e\x30\x30\ -\x30\x31\x20\x34\x33\x2e\x30\x35\x36\x32\x20\x31\x37\x2e\x39\x39\ -\x39\x39\x20\x34\x33\x2e\x30\x38\x34\x32\x20\x31\x37\x2e\x39\x39\ -\x39\x35\x43\x34\x33\x2e\x36\x37\x39\x32\x20\x31\x39\x2e\x38\x39\ -\x33\x38\x20\x34\x34\x20\x32\x31\x2e\x39\x30\x39\x35\x20\x34\x34\ -\x20\x32\x34\x2e\x30\x30\x30\x31\x43\x34\x34\x20\x32\x35\x2e\x33\ -\x38\x30\x33\x20\x34\x33\x2e\x38\x36\x30\x32\x20\x32\x36\x2e\x37\ -\x32\x37\x37\x20\x34\x33\x2e\x35\x39\x34\x20\x32\x38\x2e\x30\x32\ -\x39\x32\x43\x34\x33\x2e\x33\x39\x38\x36\x20\x32\x38\x2e\x30\x31\ -\x20\x34\x33\x2e\x32\x30\x30\x35\x20\x32\x38\x2e\x30\x30\x30\x31\ -\x20\x34\x33\x20\x32\x38\x2e\x30\x30\x30\x31\x43\x33\x39\x2e\x36\ -\x38\x36\x33\x20\x32\x38\x2e\x30\x30\x30\x31\x20\x33\x37\x20\x33\ -\x30\x2e\x36\x38\x36\x34\x20\x33\x37\x20\x33\x34\x2e\x30\x30\x30\ -\x31\x43\x33\x37\x20\x33\x35\x2e\x34\x37\x33\x34\x20\x33\x37\x2e\ -\x35\x33\x31\x20\x33\x36\x2e\x38\x32\x32\x37\x20\x33\x38\x2e\x34\ -\x31\x32\x31\x20\x33\x37\x2e\x38\x36\x37\x43\x33\x36\x2e\x30\x35\ -\x30\x32\x20\x34\x30\x2e\x33\x32\x31\x33\x20\x33\x33\x2e\x30\x36\ -\x37\x33\x20\x34\x32\x2e\x31\x37\x33\x36\x20\x32\x39\x2e\x37\x31\ -\x36\x32\x20\x34\x33\x2e\x31\x37\x31\x33\x43\x32\x38\x2e\x39\x34\ -\x32\x38\x20\x34\x30\x2e\x37\x35\x32\x20\x32\x36\x2e\x36\x37\x36\ -\x20\x33\x39\x2e\x30\x30\x30\x31\x20\x32\x34\x20\x33\x39\x2e\x30\ -\x30\x30\x31\x43\x32\x31\x2e\x33\x32\x34\x20\x33\x39\x2e\x30\x30\ -\x30\x31\x20\x31\x39\x2e\x30\x35\x37\x32\x20\x34\x30\x2e\x37\x35\ -\x32\x20\x31\x38\x2e\x32\x38\x33\x38\x20\x34\x33\x2e\x31\x37\x31\ -\x33\x5a\x22\x20\x66\x69\x6c\x6c\x3d\x22\x6e\x6f\x6e\x65\x22\x20\ -\x73\x74\x72\x6f\x6b\x65\x3d\x22\x23\x30\x30\x36\x34\x66\x66\x22\ -\x20\x73\x74\x72\x6f\x6b\x65\x2d\x77\x69\x64\x74\x68\x3d\x22\x33\ -\x22\x20\x73\x74\x72\x6f\x6b\x65\x2d\x6c\x69\x6e\x65\x6a\x6f\x69\ -\x6e\x3d\x22\x6d\x69\x74\x65\x72\x22\x2f\x3e\x3c\x70\x61\x74\x68\ -\x20\x64\x3d\x22\x4d\x32\x34\x20\x33\x31\x43\x32\x37\x2e\x38\x36\ -\x36\x20\x33\x31\x20\x33\x31\x20\x32\x37\x2e\x38\x36\x36\x20\x33\ -\x31\x20\x32\x34\x43\x33\x31\x20\x32\x30\x2e\x31\x33\x34\x20\x32\ -\x37\x2e\x38\x36\x36\x20\x31\x37\x20\x32\x34\x20\x31\x37\x43\x32\ -\x30\x2e\x31\x33\x34\x20\x31\x37\x20\x31\x37\x20\x32\x30\x2e\x31\ -\x33\x34\x20\x31\x37\x20\x32\x34\x43\x31\x37\x20\x32\x37\x2e\x38\ -\x36\x36\x20\x32\x30\x2e\x31\x33\x34\x20\x33\x31\x20\x32\x34\x20\ -\x33\x31\x5a\x22\x20\x66\x69\x6c\x6c\x3d\x22\x6e\x6f\x6e\x65\x22\ -\x20\x73\x74\x72\x6f\x6b\x65\x3d\x22\x23\x30\x30\x36\x34\x66\x66\ -\x22\x20\x73\x74\x72\x6f\x6b\x65\x2d\x77\x69\x64\x74\x68\x3d\x22\ -\x33\x22\x20\x73\x74\x72\x6f\x6b\x65\x2d\x6c\x69\x6e\x65\x6a\x6f\ -\x69\x6e\x3d\x22\x6d\x69\x74\x65\x72\x22\x2f\x3e\x3c\x2f\x73\x76\ -\x67\x3e\ -\x00\x00\x01\xd2\ -\x3c\ -\x3f\x78\x6d\x6c\x20\x76\x65\x72\x73\x69\x6f\x6e\x3d\x22\x31\x2e\ -\x30\x22\x20\x65\x6e\x63\x6f\x64\x69\x6e\x67\x3d\x22\x55\x54\x46\ -\x2d\x38\x22\x3f\x3e\x3c\x73\x76\x67\x20\x77\x69\x64\x74\x68\x3d\ -\x22\x33\x32\x22\x20\x68\x65\x69\x67\x68\x74\x3d\x22\x33\x32\x22\ -\x20\x76\x69\x65\x77\x42\x6f\x78\x3d\x22\x30\x20\x30\x20\x34\x38\ -\x20\x34\x38\x22\x20\x66\x69\x6c\x6c\x3d\x22\x6e\x6f\x6e\x65\x22\ -\x20\x78\x6d\x6c\x6e\x73\x3d\x22\x68\x74\x74\x70\x3a\x2f\x2f\x77\ 
-\x77\x77\x2e\x77\x33\x2e\x6f\x72\x67\x2f\x32\x30\x30\x30\x2f\x73\ -\x76\x67\x22\x3e\x3c\x70\x61\x74\x68\x20\x64\x3d\x22\x4d\x32\x34\ -\x2e\x30\x30\x38\x33\x20\x33\x33\x2e\x38\x39\x39\x35\x56\x36\x22\ -\x20\x73\x74\x72\x6f\x6b\x65\x3d\x22\x23\x30\x30\x36\x34\x66\x66\ -\x22\x20\x73\x74\x72\x6f\x6b\x65\x2d\x77\x69\x64\x74\x68\x3d\x22\ -\x33\x22\x20\x73\x74\x72\x6f\x6b\x65\x2d\x6c\x69\x6e\x65\x63\x61\ -\x70\x3d\x22\x73\x71\x75\x61\x72\x65\x22\x20\x73\x74\x72\x6f\x6b\ -\x65\x2d\x6c\x69\x6e\x65\x6a\x6f\x69\x6e\x3d\x22\x6d\x69\x74\x65\ -\x72\x22\x2f\x3e\x3c\x70\x61\x74\x68\x20\x64\x3d\x22\x4d\x33\x36\ -\x20\x32\x32\x4c\x32\x34\x20\x33\x34\x4c\x31\x32\x20\x32\x32\x22\ -\x20\x73\x74\x72\x6f\x6b\x65\x3d\x22\x23\x30\x30\x36\x34\x66\x66\ -\x22\x20\x73\x74\x72\x6f\x6b\x65\x2d\x77\x69\x64\x74\x68\x3d\x22\ -\x33\x22\x20\x73\x74\x72\x6f\x6b\x65\x2d\x6c\x69\x6e\x65\x63\x61\ -\x70\x3d\x22\x73\x71\x75\x61\x72\x65\x22\x20\x73\x74\x72\x6f\x6b\ -\x65\x2d\x6c\x69\x6e\x65\x6a\x6f\x69\x6e\x3d\x22\x6d\x69\x74\x65\ -\x72\x22\x2f\x3e\x3c\x70\x61\x74\x68\x20\x64\x3d\x22\x4d\x33\x36\ -\x20\x34\x32\x48\x31\x32\x22\x20\x73\x74\x72\x6f\x6b\x65\x3d\x22\ -\x23\x30\x30\x36\x34\x66\x66\x22\x20\x73\x74\x72\x6f\x6b\x65\x2d\ -\x77\x69\x64\x74\x68\x3d\x22\x33\x22\x20\x73\x74\x72\x6f\x6b\x65\ -\x2d\x6c\x69\x6e\x65\x63\x61\x70\x3d\x22\x73\x71\x75\x61\x72\x65\ -\x22\x20\x73\x74\x72\x6f\x6b\x65\x2d\x6c\x69\x6e\x65\x6a\x6f\x69\ -\x6e\x3d\x22\x6d\x69\x74\x65\x72\x22\x2f\x3e\x3c\x2f\x73\x76\x67\ -\x3e\ -\x00\x00\x02\x71\ -\x3c\ -\x3f\x78\x6d\x6c\x20\x76\x65\x72\x73\x69\x6f\x6e\x3d\x22\x31\x2e\ -\x30\x22\x20\x65\x6e\x63\x6f\x64\x69\x6e\x67\x3d\x22\x55\x54\x46\ -\x2d\x38\x22\x3f\x3e\x3c\x73\x76\x67\x20\x77\x69\x64\x74\x68\x3d\ -\x22\x33\x32\x22\x20\x68\x65\x69\x67\x68\x74\x3d\x22\x33\x32\x22\ -\x20\x76\x69\x65\x77\x42\x6f\x78\x3d\x22\x30\x20\x30\x20\x34\x38\ -\x20\x34\x38\x22\x20\x66\x69\x6c\x6c\x3d\x22\x6e\x6f\x6e\x65\x22\ -\x20\x78\x6d\x6c\x6e\x73\x3d\x22\x68\x74\x74\x70\x3a\x2f\x2f\x77\ -\x77\x77\x2e\x77\x33\x2e\x6f\x72\x67\x2f\x32\x30\x30\x30\x2f\x73\ -\x76\x67\x22\x3e\x3c\x70\x61\x74\x68\x20\x64\x3d\x22\x4d\x33\x39\ -\x2e\x33\x20\x36\x48\x38\x2e\x37\x43\x37\x2e\x32\x30\x38\x38\x33\ -\x20\x36\x20\x36\x20\x37\x2e\x32\x30\x38\x38\x33\x20\x36\x20\x38\ -\x2e\x37\x56\x33\x39\x2e\x33\x43\x36\x20\x34\x30\x2e\x37\x39\x31\ -\x32\x20\x37\x2e\x32\x30\x38\x38\x33\x20\x34\x32\x20\x38\x2e\x37\ -\x20\x34\x32\x48\x33\x39\x2e\x33\x43\x34\x30\x2e\x37\x39\x31\x32\ -\x20\x34\x32\x20\x34\x32\x20\x34\x30\x2e\x37\x39\x31\x32\x20\x34\ -\x32\x20\x33\x39\x2e\x33\x56\x38\x2e\x37\x43\x34\x32\x20\x37\x2e\ -\x32\x30\x38\x38\x33\x20\x34\x30\x2e\x37\x39\x31\x32\x20\x36\x20\ -\x33\x39\x2e\x33\x20\x36\x5a\x22\x20\x66\x69\x6c\x6c\x3d\x22\x6e\ -\x6f\x6e\x65\x22\x20\x73\x74\x72\x6f\x6b\x65\x3d\x22\x23\x30\x30\ -\x36\x34\x66\x66\x22\x20\x73\x74\x72\x6f\x6b\x65\x2d\x77\x69\x64\ -\x74\x68\x3d\x22\x33\x22\x20\x73\x74\x72\x6f\x6b\x65\x2d\x6c\x69\ -\x6e\x65\x6a\x6f\x69\x6e\x3d\x22\x6d\x69\x74\x65\x72\x22\x2f\x3e\ -\x3c\x70\x61\x74\x68\x20\x64\x3d\x22\x4d\x33\x32\x20\x36\x56\x32\ -\x34\x48\x31\x35\x56\x36\x48\x33\x32\x5a\x22\x20\x66\x69\x6c\x6c\ -\x3d\x22\x6e\x6f\x6e\x65\x22\x20\x73\x74\x72\x6f\x6b\x65\x3d\x22\ -\x23\x30\x30\x36\x34\x66\x66\x22\x20\x73\x74\x72\x6f\x6b\x65\x2d\ -\x77\x69\x64\x74\x68\x3d\x22\x33\x22\x20\x73\x74\x72\x6f\x6b\x65\ -\x2d\x6c\x69\x6e\x65\x6a\x6f\x69\x6e\x3d\x22\x6d\x69\x74\x65\x72\ -\x22\x2f\x3e\x3c\x70\x61\x74\x68\x20\x64\x3d\x22\x4d\x32\x36\x20\ -\x31\x33\x56\x31\x37\x22\x20\x73\x74\x72\x6f\x6b\x65\x3d\x22\x23\ 
-\x30\x30\x36\x34\x66\x66\x22\x20\x73\x74\x72\x6f\x6b\x65\x2d\x77\ -\x69\x64\x74\x68\x3d\x22\x33\x22\x20\x73\x74\x72\x6f\x6b\x65\x2d\ -\x6c\x69\x6e\x65\x63\x61\x70\x3d\x22\x73\x71\x75\x61\x72\x65\x22\ -\x2f\x3e\x3c\x70\x61\x74\x68\x20\x64\x3d\x22\x4d\x31\x30\x2e\x39\ -\x39\x37\x31\x20\x36\x48\x33\x35\x2e\x39\x39\x38\x36\x22\x20\x73\ -\x74\x72\x6f\x6b\x65\x3d\x22\x23\x30\x30\x36\x34\x66\x66\x22\x20\ -\x73\x74\x72\x6f\x6b\x65\x2d\x77\x69\x64\x74\x68\x3d\x22\x33\x22\ -\x20\x73\x74\x72\x6f\x6b\x65\x2d\x6c\x69\x6e\x65\x63\x61\x70\x3d\ -\x22\x73\x71\x75\x61\x72\x65\x22\x2f\x3e\x3c\x2f\x73\x76\x67\x3e\ -\ +\x76\x67\x22\x3e\x3c\x70\x61\x74\x68\x20\x64\x3d\x22\x4d\x31\x30\ +\x20\x34\x34\x48\x33\x38\x43\x33\x39\x2e\x31\x30\x34\x36\x20\x34\ +\x34\x20\x34\x30\x20\x34\x33\x2e\x31\x30\x34\x36\x20\x34\x30\x20\ +\x34\x32\x56\x31\x34\x48\x33\x30\x56\x34\x48\x31\x30\x43\x38\x2e\ +\x38\x39\x35\x34\x33\x20\x34\x20\x38\x20\x34\x2e\x38\x39\x35\x34\ +\x33\x20\x38\x20\x36\x56\x34\x32\x43\x38\x20\x34\x33\x2e\x31\x30\ +\x34\x36\x20\x38\x2e\x38\x39\x35\x34\x33\x20\x34\x34\x20\x31\x30\ +\x20\x34\x34\x5a\x22\x20\x66\x69\x6c\x6c\x3d\x22\x6e\x6f\x6e\x65\ +\x22\x20\x73\x74\x72\x6f\x6b\x65\x3d\x22\x23\x30\x30\x36\x34\x66\ +\x66\x22\x20\x73\x74\x72\x6f\x6b\x65\x2d\x77\x69\x64\x74\x68\x3d\ +\x22\x33\x22\x20\x73\x74\x72\x6f\x6b\x65\x2d\x6c\x69\x6e\x65\x63\ +\x61\x70\x3d\x22\x73\x71\x75\x61\x72\x65\x22\x20\x73\x74\x72\x6f\ +\x6b\x65\x2d\x6c\x69\x6e\x65\x6a\x6f\x69\x6e\x3d\x22\x6d\x69\x74\ +\x65\x72\x22\x2f\x3e\x3c\x70\x61\x74\x68\x20\x64\x3d\x22\x4d\x33\ +\x30\x20\x34\x4c\x34\x30\x20\x31\x34\x22\x20\x73\x74\x72\x6f\x6b\ +\x65\x3d\x22\x23\x30\x30\x36\x34\x66\x66\x22\x20\x73\x74\x72\x6f\ +\x6b\x65\x2d\x77\x69\x64\x74\x68\x3d\x22\x33\x22\x20\x73\x74\x72\ +\x6f\x6b\x65\x2d\x6c\x69\x6e\x65\x63\x61\x70\x3d\x22\x73\x71\x75\ +\x61\x72\x65\x22\x20\x73\x74\x72\x6f\x6b\x65\x2d\x6c\x69\x6e\x65\ +\x6a\x6f\x69\x6e\x3d\x22\x6d\x69\x74\x65\x72\x22\x2f\x3e\x3c\x70\ +\x61\x74\x68\x20\x64\x3d\x22\x4d\x32\x31\x20\x33\x35\x4c\x33\x31\ +\x20\x32\x35\x4c\x32\x37\x20\x32\x31\x4c\x31\x37\x20\x33\x31\x56\ +\x33\x35\x48\x32\x31\x5a\x22\x20\x66\x69\x6c\x6c\x3d\x22\x6e\x6f\ +\x6e\x65\x22\x20\x73\x74\x72\x6f\x6b\x65\x3d\x22\x23\x30\x30\x36\ +\x34\x66\x66\x22\x20\x73\x74\x72\x6f\x6b\x65\x2d\x77\x69\x64\x74\ +\x68\x3d\x22\x33\x22\x20\x73\x74\x72\x6f\x6b\x65\x2d\x6c\x69\x6e\ +\x65\x63\x61\x70\x3d\x22\x73\x71\x75\x61\x72\x65\x22\x20\x73\x74\ +\x72\x6f\x6b\x65\x2d\x6c\x69\x6e\x65\x6a\x6f\x69\x6e\x3d\x22\x6d\ +\x69\x74\x65\x72\x22\x2f\x3e\x3c\x2f\x73\x76\x67\x3e\ \x00\x00\x0a\x2e\ \x89\ \x50\x4e\x47\x0d\x0a\x1a\x0a\x00\x00\x00\x0d\x49\x48\x44\x52\x00\ @@ -946,7 +255,7 @@ qt_resource_data = b"\ \xa3\x2c\x5c\x99\x27\xe7\x4b\xcc\x56\x9b\xa8\x58\xc4\x2f\x74\x12\ \xb6\x17\x85\x5c\xf8\x93\xe7\xf9\x1f\xac\x72\xde\x5a\x34\x54\x73\ \x75\x00\x00\x00\x00\x49\x45\x4e\x44\xae\x42\x60\x82\ -\x00\x00\x01\xe3\ +\x00\x00\x03\x09\ \x3c\ \x3f\x78\x6d\x6c\x20\x76\x65\x72\x73\x69\x6f\x6e\x3d\x22\x31\x2e\ \x30\x22\x20\x65\x6e\x63\x6f\x64\x69\x6e\x67\x3d\x22\x55\x54\x46\ @@ -956,29 +265,212 @@ qt_resource_data = b"\ \x20\x34\x38\x22\x20\x66\x69\x6c\x6c\x3d\x22\x6e\x6f\x6e\x65\x22\ \x20\x78\x6d\x6c\x6e\x73\x3d\x22\x68\x74\x74\x70\x3a\x2f\x2f\x77\ \x77\x77\x2e\x77\x33\x2e\x6f\x72\x67\x2f\x32\x30\x30\x30\x2f\x73\ -\x76\x67\x22\x3e\x3c\x70\x61\x74\x68\x20\x66\x69\x6c\x6c\x2d\x72\ -\x75\x6c\x65\x3d\x22\x65\x76\x65\x6e\x6f\x64\x64\x22\x20\x63\x6c\ -\x69\x70\x2d\x72\x75\x6c\x65\x3d\x22\x65\x76\x65\x6e\x6f\x64\x64\ -\x22\x20\x64\x3d\x22\x4d\x34\x20\x34\x30\x2e\x38\x33\x36\x31\x43\ 
-\x38\x2e\x38\x39\x33\x30\x37\x20\x33\x34\x2e\x38\x36\x33\x32\x20\ -\x31\x33\x2e\x32\x33\x38\x33\x20\x33\x31\x2e\x34\x37\x33\x39\x20\ -\x31\x37\x2e\x30\x33\x35\x36\x20\x33\x30\x2e\x36\x36\x38\x32\x43\ -\x32\x30\x2e\x38\x33\x32\x39\x20\x32\x39\x2e\x38\x36\x32\x35\x20\ -\x32\x34\x2e\x34\x34\x38\x33\x20\x32\x39\x2e\x37\x34\x30\x38\x20\ -\x32\x37\x2e\x38\x38\x31\x38\x20\x33\x30\x2e\x33\x30\x33\x56\x34\ -\x31\x4c\x34\x34\x20\x32\x33\x2e\x35\x34\x35\x33\x4c\x32\x37\x2e\ -\x38\x38\x31\x38\x20\x37\x56\x31\x37\x2e\x31\x36\x37\x43\x32\x31\ -\x2e\x35\x33\x33\x33\x20\x31\x37\x2e\x32\x31\x37\x32\x20\x31\x36\ -\x2e\x31\x33\x36\x32\x20\x31\x39\x2e\x34\x39\x34\x38\x20\x31\x31\ -\x2e\x36\x39\x30\x35\x20\x32\x34\x43\x37\x2e\x32\x34\x34\x37\x34\ -\x20\x32\x38\x2e\x35\x30\x35\x32\x20\x34\x2e\x36\x38\x31\x32\x36\ -\x20\x33\x34\x2e\x31\x31\x37\x32\x20\x34\x20\x34\x30\x2e\x38\x33\ -\x36\x31\x5a\x22\x20\x66\x69\x6c\x6c\x3d\x22\x6e\x6f\x6e\x65\x22\ +\x76\x67\x22\x3e\x3c\x70\x61\x74\x68\x20\x64\x3d\x22\x4d\x35\x20\ +\x31\x30\x4c\x38\x20\x31\x33\x4c\x31\x34\x20\x37\x22\x20\x73\x74\ +\x72\x6f\x6b\x65\x3d\x22\x23\x30\x30\x36\x34\x66\x66\x22\x20\x73\ +\x74\x72\x6f\x6b\x65\x2d\x77\x69\x64\x74\x68\x3d\x22\x33\x22\x20\ +\x73\x74\x72\x6f\x6b\x65\x2d\x6c\x69\x6e\x65\x63\x61\x70\x3d\x22\ +\x73\x71\x75\x61\x72\x65\x22\x20\x73\x74\x72\x6f\x6b\x65\x2d\x6c\ +\x69\x6e\x65\x6a\x6f\x69\x6e\x3d\x22\x6d\x69\x74\x65\x72\x22\x2f\ +\x3e\x3c\x70\x61\x74\x68\x20\x64\x3d\x22\x4d\x35\x20\x32\x34\x4c\ +\x38\x20\x32\x37\x4c\x31\x34\x20\x32\x31\x22\x20\x73\x74\x72\x6f\ +\x6b\x65\x3d\x22\x23\x30\x30\x36\x34\x66\x66\x22\x20\x73\x74\x72\ +\x6f\x6b\x65\x2d\x77\x69\x64\x74\x68\x3d\x22\x33\x22\x20\x73\x74\ +\x72\x6f\x6b\x65\x2d\x6c\x69\x6e\x65\x63\x61\x70\x3d\x22\x73\x71\ +\x75\x61\x72\x65\x22\x20\x73\x74\x72\x6f\x6b\x65\x2d\x6c\x69\x6e\ +\x65\x6a\x6f\x69\x6e\x3d\x22\x6d\x69\x74\x65\x72\x22\x2f\x3e\x3c\ +\x70\x61\x74\x68\x20\x64\x3d\x22\x4d\x35\x20\x33\x38\x4c\x38\x20\ +\x34\x31\x4c\x31\x34\x20\x33\x35\x22\x20\x73\x74\x72\x6f\x6b\x65\ +\x3d\x22\x23\x30\x30\x36\x34\x66\x66\x22\x20\x73\x74\x72\x6f\x6b\ +\x65\x2d\x77\x69\x64\x74\x68\x3d\x22\x33\x22\x20\x73\x74\x72\x6f\ +\x6b\x65\x2d\x6c\x69\x6e\x65\x63\x61\x70\x3d\x22\x73\x71\x75\x61\ +\x72\x65\x22\x20\x73\x74\x72\x6f\x6b\x65\x2d\x6c\x69\x6e\x65\x6a\ +\x6f\x69\x6e\x3d\x22\x6d\x69\x74\x65\x72\x22\x2f\x3e\x3c\x70\x61\ +\x74\x68\x20\x64\x3d\x22\x4d\x32\x31\x20\x32\x34\x48\x34\x33\x22\ \x20\x73\x74\x72\x6f\x6b\x65\x3d\x22\x23\x30\x30\x36\x34\x66\x66\ \x22\x20\x73\x74\x72\x6f\x6b\x65\x2d\x77\x69\x64\x74\x68\x3d\x22\ -\x33\x22\x20\x73\x74\x72\x6f\x6b\x65\x2d\x6c\x69\x6e\x65\x6a\x6f\ -\x69\x6e\x3d\x22\x6d\x69\x74\x65\x72\x22\x2f\x3e\x3c\x2f\x73\x76\ -\x67\x3e\ +\x33\x22\x20\x73\x74\x72\x6f\x6b\x65\x2d\x6c\x69\x6e\x65\x63\x61\ +\x70\x3d\x22\x73\x71\x75\x61\x72\x65\x22\x20\x73\x74\x72\x6f\x6b\ +\x65\x2d\x6c\x69\x6e\x65\x6a\x6f\x69\x6e\x3d\x22\x6d\x69\x74\x65\ +\x72\x22\x2f\x3e\x3c\x70\x61\x74\x68\x20\x64\x3d\x22\x4d\x32\x31\ +\x20\x33\x38\x48\x34\x33\x22\x20\x73\x74\x72\x6f\x6b\x65\x3d\x22\ +\x23\x30\x30\x36\x34\x66\x66\x22\x20\x73\x74\x72\x6f\x6b\x65\x2d\ +\x77\x69\x64\x74\x68\x3d\x22\x33\x22\x20\x73\x74\x72\x6f\x6b\x65\ +\x2d\x6c\x69\x6e\x65\x63\x61\x70\x3d\x22\x73\x71\x75\x61\x72\x65\ +\x22\x20\x73\x74\x72\x6f\x6b\x65\x2d\x6c\x69\x6e\x65\x6a\x6f\x69\ +\x6e\x3d\x22\x6d\x69\x74\x65\x72\x22\x2f\x3e\x3c\x70\x61\x74\x68\ +\x20\x64\x3d\x22\x4d\x32\x31\x20\x31\x30\x48\x34\x33\x22\x20\x73\ +\x74\x72\x6f\x6b\x65\x3d\x22\x23\x30\x30\x36\x34\x66\x66\x22\x20\ 
+\x73\x74\x72\x6f\x6b\x65\x2d\x77\x69\x64\x74\x68\x3d\x22\x33\x22\ +\x20\x73\x74\x72\x6f\x6b\x65\x2d\x6c\x69\x6e\x65\x63\x61\x70\x3d\ +\x22\x73\x71\x75\x61\x72\x65\x22\x20\x73\x74\x72\x6f\x6b\x65\x2d\ +\x6c\x69\x6e\x65\x6a\x6f\x69\x6e\x3d\x22\x6d\x69\x74\x65\x72\x22\ +\x2f\x3e\x3c\x2f\x73\x76\x67\x3e\ +\x00\x00\x03\xca\ +\x3c\ +\x3f\x78\x6d\x6c\x20\x76\x65\x72\x73\x69\x6f\x6e\x3d\x22\x31\x2e\ +\x30\x22\x20\x65\x6e\x63\x6f\x64\x69\x6e\x67\x3d\x22\x55\x54\x46\ +\x2d\x38\x22\x3f\x3e\x3c\x73\x76\x67\x20\x77\x69\x64\x74\x68\x3d\ +\x22\x33\x32\x22\x20\x68\x65\x69\x67\x68\x74\x3d\x22\x33\x32\x22\ +\x20\x76\x69\x65\x77\x42\x6f\x78\x3d\x22\x30\x20\x30\x20\x34\x38\ +\x20\x34\x38\x22\x20\x66\x69\x6c\x6c\x3d\x22\x6e\x6f\x6e\x65\x22\ +\x20\x78\x6d\x6c\x6e\x73\x3d\x22\x68\x74\x74\x70\x3a\x2f\x2f\x77\ +\x77\x77\x2e\x77\x33\x2e\x6f\x72\x67\x2f\x32\x30\x30\x30\x2f\x73\ +\x76\x67\x22\x3e\x3c\x70\x61\x74\x68\x20\x64\x3d\x22\x4d\x36\x20\ +\x39\x43\x36\x20\x37\x2e\x33\x34\x33\x31\x35\x20\x37\x2e\x33\x34\ +\x33\x31\x35\x20\x36\x20\x39\x20\x36\x48\x33\x34\x2e\x32\x38\x31\ +\x34\x4c\x34\x32\x20\x31\x33\x2e\x32\x30\x36\x35\x56\x33\x39\x43\ +\x34\x32\x20\x34\x30\x2e\x36\x35\x36\x39\x20\x34\x30\x2e\x36\x35\ +\x36\x39\x20\x34\x32\x20\x33\x39\x20\x34\x32\x48\x39\x43\x37\x2e\ +\x33\x34\x33\x31\x35\x20\x34\x32\x20\x36\x20\x34\x30\x2e\x36\x35\ +\x36\x39\x20\x36\x20\x33\x39\x56\x39\x5a\x22\x20\x66\x69\x6c\x6c\ +\x3d\x22\x6e\x6f\x6e\x65\x22\x20\x73\x74\x72\x6f\x6b\x65\x3d\x22\ +\x23\x30\x30\x36\x34\x66\x66\x22\x20\x73\x74\x72\x6f\x6b\x65\x2d\ +\x77\x69\x64\x74\x68\x3d\x22\x33\x22\x20\x73\x74\x72\x6f\x6b\x65\ +\x2d\x6c\x69\x6e\x65\x6a\x6f\x69\x6e\x3d\x22\x6d\x69\x74\x65\x72\ +\x22\x2f\x3e\x3c\x70\x61\x74\x68\x20\x66\x69\x6c\x6c\x2d\x72\x75\ +\x6c\x65\x3d\x22\x65\x76\x65\x6e\x6f\x64\x64\x22\x20\x63\x6c\x69\ +\x70\x2d\x72\x75\x6c\x65\x3d\x22\x65\x76\x65\x6e\x6f\x64\x64\x22\ +\x20\x64\x3d\x22\x4d\x32\x34\x2e\x30\x30\x38\x33\x20\x36\x4c\x32\ +\x34\x20\x31\x33\x2e\x33\x38\x34\x36\x43\x32\x34\x20\x31\x33\x2e\ +\x37\x32\x34\x35\x20\x32\x33\x2e\x35\x35\x32\x33\x20\x31\x34\x20\ +\x32\x33\x20\x31\x34\x48\x31\x35\x43\x31\x34\x2e\x34\x34\x37\x37\ +\x20\x31\x34\x20\x31\x34\x20\x31\x33\x2e\x37\x32\x34\x35\x20\x31\ +\x34\x20\x31\x33\x2e\x33\x38\x34\x36\x4c\x31\x34\x20\x36\x22\x20\ +\x66\x69\x6c\x6c\x3d\x22\x6e\x6f\x6e\x65\x22\x2f\x3e\x3c\x70\x61\ +\x74\x68\x20\x64\x3d\x22\x4d\x32\x34\x2e\x30\x30\x38\x33\x20\x36\ +\x4c\x32\x34\x20\x31\x33\x2e\x33\x38\x34\x36\x43\x32\x34\x20\x31\ +\x33\x2e\x37\x32\x34\x35\x20\x32\x33\x2e\x35\x35\x32\x33\x20\x31\ +\x34\x20\x32\x33\x20\x31\x34\x48\x31\x35\x43\x31\x34\x2e\x34\x34\ +\x37\x37\x20\x31\x34\x20\x31\x34\x20\x31\x33\x2e\x37\x32\x34\x35\ +\x20\x31\x34\x20\x31\x33\x2e\x33\x38\x34\x36\x4c\x31\x34\x20\x36\ +\x48\x32\x34\x2e\x30\x30\x38\x33\x5a\x22\x20\x73\x74\x72\x6f\x6b\ +\x65\x3d\x22\x23\x30\x30\x36\x34\x66\x66\x22\x20\x73\x74\x72\x6f\ +\x6b\x65\x2d\x77\x69\x64\x74\x68\x3d\x22\x33\x22\x20\x73\x74\x72\ +\x6f\x6b\x65\x2d\x6c\x69\x6e\x65\x6a\x6f\x69\x6e\x3d\x22\x6d\x69\ +\x74\x65\x72\x22\x2f\x3e\x3c\x70\x61\x74\x68\x20\x64\x3d\x22\x4d\ +\x39\x20\x36\x48\x33\x34\x2e\x32\x38\x31\x34\x22\x20\x73\x74\x72\ +\x6f\x6b\x65\x3d\x22\x23\x30\x30\x36\x34\x66\x66\x22\x20\x73\x74\ +\x72\x6f\x6b\x65\x2d\x77\x69\x64\x74\x68\x3d\x22\x33\x22\x20\x73\ +\x74\x72\x6f\x6b\x65\x2d\x6c\x69\x6e\x65\x63\x61\x70\x3d\x22\x73\ +\x71\x75\x61\x72\x65\x22\x20\x73\x74\x72\x6f\x6b\x65\x2d\x6c\x69\ +\x6e\x65\x6a\x6f\x69\x6e\x3d\x22\x6d\x69\x74\x65\x72\x22\x2f\x3e\ +\x3c\x70\x61\x74\x68\x20\x64\x3d\x22\x4d\x31\x34\x20\x32\x36\x48\ 
+\x33\x34\x22\x20\x73\x74\x72\x6f\x6b\x65\x3d\x22\x23\x30\x30\x36\ +\x34\x66\x66\x22\x20\x73\x74\x72\x6f\x6b\x65\x2d\x77\x69\x64\x74\ +\x68\x3d\x22\x33\x22\x20\x73\x74\x72\x6f\x6b\x65\x2d\x6c\x69\x6e\ +\x65\x63\x61\x70\x3d\x22\x73\x71\x75\x61\x72\x65\x22\x20\x73\x74\ +\x72\x6f\x6b\x65\x2d\x6c\x69\x6e\x65\x6a\x6f\x69\x6e\x3d\x22\x6d\ +\x69\x74\x65\x72\x22\x2f\x3e\x3c\x70\x61\x74\x68\x20\x64\x3d\x22\ +\x4d\x31\x34\x20\x33\x34\x48\x32\x34\x2e\x30\x30\x38\x33\x22\x20\ +\x73\x74\x72\x6f\x6b\x65\x3d\x22\x23\x30\x30\x36\x34\x66\x66\x22\ +\x20\x73\x74\x72\x6f\x6b\x65\x2d\x77\x69\x64\x74\x68\x3d\x22\x33\ +\x22\x20\x73\x74\x72\x6f\x6b\x65\x2d\x6c\x69\x6e\x65\x63\x61\x70\ +\x3d\x22\x73\x71\x75\x61\x72\x65\x22\x20\x73\x74\x72\x6f\x6b\x65\ +\x2d\x6c\x69\x6e\x65\x6a\x6f\x69\x6e\x3d\x22\x6d\x69\x74\x65\x72\ +\x22\x2f\x3e\x3c\x2f\x73\x76\x67\x3e\ +\x00\x00\x03\xee\ +\x3c\ +\x3f\x78\x6d\x6c\x20\x76\x65\x72\x73\x69\x6f\x6e\x3d\x22\x31\x2e\ +\x30\x22\x20\x65\x6e\x63\x6f\x64\x69\x6e\x67\x3d\x22\x55\x54\x46\ +\x2d\x38\x22\x3f\x3e\x3c\x73\x76\x67\x20\x77\x69\x64\x74\x68\x3d\ +\x22\x33\x32\x22\x20\x68\x65\x69\x67\x68\x74\x3d\x22\x33\x32\x22\ +\x20\x76\x69\x65\x77\x42\x6f\x78\x3d\x22\x30\x20\x30\x20\x34\x38\ +\x20\x34\x38\x22\x20\x66\x69\x6c\x6c\x3d\x22\x6e\x6f\x6e\x65\x22\ +\x20\x78\x6d\x6c\x6e\x73\x3d\x22\x68\x74\x74\x70\x3a\x2f\x2f\x77\ +\x77\x77\x2e\x77\x33\x2e\x6f\x72\x67\x2f\x32\x30\x30\x30\x2f\x73\ +\x76\x67\x22\x3e\x3c\x70\x61\x74\x68\x20\x64\x3d\x22\x4d\x38\x20\ +\x32\x38\x43\x31\x30\x2e\x32\x30\x39\x31\x20\x32\x38\x20\x31\x32\ +\x20\x32\x36\x2e\x32\x30\x39\x31\x20\x31\x32\x20\x32\x34\x43\x31\ +\x32\x20\x32\x31\x2e\x37\x39\x30\x39\x20\x31\x30\x2e\x32\x30\x39\ +\x31\x20\x32\x30\x20\x38\x20\x32\x30\x43\x35\x2e\x37\x39\x30\x38\ +\x36\x20\x32\x30\x20\x34\x20\x32\x31\x2e\x37\x39\x30\x39\x20\x34\ +\x20\x32\x34\x43\x34\x20\x32\x36\x2e\x32\x30\x39\x31\x20\x35\x2e\ +\x37\x39\x30\x38\x36\x20\x32\x38\x20\x38\x20\x32\x38\x5a\x22\x20\ +\x66\x69\x6c\x6c\x3d\x22\x6e\x6f\x6e\x65\x22\x20\x73\x74\x72\x6f\ +\x6b\x65\x3d\x22\x23\x30\x30\x36\x34\x66\x66\x22\x20\x73\x74\x72\ +\x6f\x6b\x65\x2d\x77\x69\x64\x74\x68\x3d\x22\x33\x22\x20\x73\x74\ +\x72\x6f\x6b\x65\x2d\x6c\x69\x6e\x65\x6a\x6f\x69\x6e\x3d\x22\x6d\ +\x69\x74\x65\x72\x22\x2f\x3e\x3c\x70\x61\x74\x68\x20\x64\x3d\x22\ +\x4d\x38\x20\x31\x32\x43\x39\x2e\x31\x30\x34\x35\x37\x20\x31\x32\ +\x20\x31\x30\x20\x31\x31\x2e\x31\x30\x34\x36\x20\x31\x30\x20\x31\ +\x30\x43\x31\x30\x20\x38\x2e\x38\x39\x35\x34\x33\x20\x39\x2e\x31\ +\x30\x34\x35\x37\x20\x38\x20\x38\x20\x38\x43\x36\x2e\x38\x39\x35\ +\x34\x33\x20\x38\x20\x36\x20\x38\x2e\x38\x39\x35\x34\x33\x20\x36\ +\x20\x31\x30\x43\x36\x20\x31\x31\x2e\x31\x30\x34\x36\x20\x36\x2e\ +\x38\x39\x35\x34\x33\x20\x31\x32\x20\x38\x20\x31\x32\x5a\x22\x20\ +\x73\x74\x72\x6f\x6b\x65\x3d\x22\x23\x30\x30\x36\x34\x66\x66\x22\ +\x20\x73\x74\x72\x6f\x6b\x65\x2d\x77\x69\x64\x74\x68\x3d\x22\x33\ +\x22\x20\x73\x74\x72\x6f\x6b\x65\x2d\x6c\x69\x6e\x65\x6a\x6f\x69\ +\x6e\x3d\x22\x6d\x69\x74\x65\x72\x22\x2f\x3e\x3c\x70\x61\x74\x68\ +\x20\x64\x3d\x22\x4d\x38\x20\x34\x30\x43\x39\x2e\x31\x30\x34\x35\ +\x37\x20\x34\x30\x20\x31\x30\x20\x33\x39\x2e\x31\x30\x34\x36\x20\ +\x31\x30\x20\x33\x38\x43\x31\x30\x20\x33\x36\x2e\x38\x39\x35\x34\ +\x20\x39\x2e\x31\x30\x34\x35\x37\x20\x33\x36\x20\x38\x20\x33\x36\ +\x43\x36\x2e\x38\x39\x35\x34\x33\x20\x33\x36\x20\x36\x20\x33\x36\ +\x2e\x38\x39\x35\x34\x20\x36\x20\x33\x38\x43\x36\x20\x33\x39\x2e\ +\x31\x30\x34\x36\x20\x36\x2e\x38\x39\x35\x34\x33\x20\x34\x30\x20\ +\x38\x20\x34\x30\x5a\x22\x20\x73\x74\x72\x6f\x6b\x65\x3d\x22\x23\ 
+\x30\x30\x36\x34\x66\x66\x22\x20\x73\x74\x72\x6f\x6b\x65\x2d\x77\ +\x69\x64\x74\x68\x3d\x22\x33\x22\x20\x73\x74\x72\x6f\x6b\x65\x2d\ +\x6c\x69\x6e\x65\x6a\x6f\x69\x6e\x3d\x22\x6d\x69\x74\x65\x72\x22\ +\x2f\x3e\x3c\x70\x61\x74\x68\x20\x64\x3d\x22\x4d\x32\x30\x20\x32\ +\x34\x48\x34\x34\x22\x20\x73\x74\x72\x6f\x6b\x65\x3d\x22\x23\x30\ +\x30\x36\x34\x66\x66\x22\x20\x73\x74\x72\x6f\x6b\x65\x2d\x77\x69\ +\x64\x74\x68\x3d\x22\x33\x22\x20\x73\x74\x72\x6f\x6b\x65\x2d\x6c\ +\x69\x6e\x65\x63\x61\x70\x3d\x22\x73\x71\x75\x61\x72\x65\x22\x20\ +\x73\x74\x72\x6f\x6b\x65\x2d\x6c\x69\x6e\x65\x6a\x6f\x69\x6e\x3d\ +\x22\x6d\x69\x74\x65\x72\x22\x2f\x3e\x3c\x70\x61\x74\x68\x20\x64\ +\x3d\x22\x4d\x32\x30\x20\x33\x38\x48\x34\x34\x22\x20\x73\x74\x72\ +\x6f\x6b\x65\x3d\x22\x23\x30\x30\x36\x34\x66\x66\x22\x20\x73\x74\ +\x72\x6f\x6b\x65\x2d\x77\x69\x64\x74\x68\x3d\x22\x33\x22\x20\x73\ +\x74\x72\x6f\x6b\x65\x2d\x6c\x69\x6e\x65\x63\x61\x70\x3d\x22\x73\ +\x71\x75\x61\x72\x65\x22\x20\x73\x74\x72\x6f\x6b\x65\x2d\x6c\x69\ +\x6e\x65\x6a\x6f\x69\x6e\x3d\x22\x6d\x69\x74\x65\x72\x22\x2f\x3e\ +\x3c\x70\x61\x74\x68\x20\x64\x3d\x22\x4d\x32\x30\x20\x31\x30\x48\ +\x34\x34\x22\x20\x73\x74\x72\x6f\x6b\x65\x3d\x22\x23\x30\x30\x36\ +\x34\x66\x66\x22\x20\x73\x74\x72\x6f\x6b\x65\x2d\x77\x69\x64\x74\ +\x68\x3d\x22\x33\x22\x20\x73\x74\x72\x6f\x6b\x65\x2d\x6c\x69\x6e\ +\x65\x63\x61\x70\x3d\x22\x73\x71\x75\x61\x72\x65\x22\x20\x73\x74\ +\x72\x6f\x6b\x65\x2d\x6c\x69\x6e\x65\x6a\x6f\x69\x6e\x3d\x22\x6d\ +\x69\x74\x65\x72\x22\x2f\x3e\x3c\x2f\x73\x76\x67\x3e\ +\x00\x00\x02\x2b\ +\x3c\ +\x3f\x78\x6d\x6c\x20\x76\x65\x72\x73\x69\x6f\x6e\x3d\x22\x31\x2e\ +\x30\x22\x20\x65\x6e\x63\x6f\x64\x69\x6e\x67\x3d\x22\x55\x54\x46\ +\x2d\x38\x22\x3f\x3e\x3c\x73\x76\x67\x20\x77\x69\x64\x74\x68\x3d\ +\x22\x33\x32\x22\x20\x68\x65\x69\x67\x68\x74\x3d\x22\x33\x32\x22\ +\x20\x76\x69\x65\x77\x42\x6f\x78\x3d\x22\x30\x20\x30\x20\x34\x38\ +\x20\x34\x38\x22\x20\x66\x69\x6c\x6c\x3d\x22\x6e\x6f\x6e\x65\x22\ +\x20\x78\x6d\x6c\x6e\x73\x3d\x22\x68\x74\x74\x70\x3a\x2f\x2f\x77\ +\x77\x77\x2e\x77\x33\x2e\x6f\x72\x67\x2f\x32\x30\x30\x30\x2f\x73\ +\x76\x67\x22\x3e\x3c\x70\x61\x74\x68\x20\x64\x3d\x22\x4d\x31\x39\ +\x20\x36\x48\x39\x43\x37\x2e\x33\x34\x33\x31\x35\x20\x36\x20\x36\ +\x20\x37\x2e\x33\x34\x33\x31\x35\x20\x36\x20\x39\x56\x33\x39\x43\ +\x36\x20\x34\x30\x2e\x36\x35\x36\x39\x20\x37\x2e\x33\x34\x33\x31\ +\x35\x20\x34\x32\x20\x39\x20\x34\x32\x48\x33\x39\x43\x34\x30\x2e\ +\x36\x35\x36\x39\x20\x34\x32\x20\x34\x32\x20\x34\x30\x2e\x36\x35\ +\x36\x39\x20\x34\x32\x20\x33\x39\x56\x32\x39\x22\x20\x73\x74\x72\ +\x6f\x6b\x65\x3d\x22\x23\x30\x30\x36\x34\x66\x66\x22\x20\x73\x74\ +\x72\x6f\x6b\x65\x2d\x77\x69\x64\x74\x68\x3d\x22\x33\x22\x20\x73\ +\x74\x72\x6f\x6b\x65\x2d\x6c\x69\x6e\x65\x63\x61\x70\x3d\x22\x73\ +\x71\x75\x61\x72\x65\x22\x20\x73\x74\x72\x6f\x6b\x65\x2d\x6c\x69\ +\x6e\x65\x6a\x6f\x69\x6e\x3d\x22\x6d\x69\x74\x65\x72\x22\x2f\x3e\ +\x3c\x70\x61\x74\x68\x20\x64\x3d\x22\x4d\x34\x32\x20\x31\x39\x4c\ +\x33\x38\x20\x37\x4c\x32\x36\x20\x31\x31\x22\x20\x73\x74\x72\x6f\ +\x6b\x65\x3d\x22\x23\x30\x30\x36\x34\x66\x66\x22\x20\x73\x74\x72\ +\x6f\x6b\x65\x2d\x77\x69\x64\x74\x68\x3d\x22\x33\x22\x20\x73\x74\ +\x72\x6f\x6b\x65\x2d\x6c\x69\x6e\x65\x63\x61\x70\x3d\x22\x73\x71\ +\x75\x61\x72\x65\x22\x20\x73\x74\x72\x6f\x6b\x65\x2d\x6c\x69\x6e\ +\x65\x6a\x6f\x69\x6e\x3d\x22\x6d\x69\x74\x65\x72\x22\x2f\x3e\x3c\ +\x70\x61\x74\x68\x20\x64\x3d\x22\x4d\x33\x38\x20\x37\x43\x33\x33\ +\x20\x32\x32\x20\x33\x30\x20\x32\x35\x20\x32\x30\x20\x32\x39\x22\ 
+\x20\x73\x74\x72\x6f\x6b\x65\x3d\x22\x23\x30\x30\x36\x34\x66\x66\ +\x22\x20\x73\x74\x72\x6f\x6b\x65\x2d\x77\x69\x64\x74\x68\x3d\x22\ +\x33\x22\x20\x73\x74\x72\x6f\x6b\x65\x2d\x6c\x69\x6e\x65\x63\x61\ +\x70\x3d\x22\x73\x71\x75\x61\x72\x65\x22\x20\x73\x74\x72\x6f\x6b\ +\x65\x2d\x6c\x69\x6e\x65\x6a\x6f\x69\x6e\x3d\x22\x6d\x69\x74\x65\ +\x72\x22\x2f\x3e\x3c\x2f\x73\x76\x67\x3e\ \x00\x00\x01\xe4\ \x3c\ \x3f\x78\x6d\x6c\x20\x76\x65\x72\x73\x69\x6f\x6e\x3d\x22\x31\x2e\ @@ -1012,6 +504,1205 @@ qt_resource_data = b"\ \x72\x65\x22\x20\x73\x74\x72\x6f\x6b\x65\x2d\x6c\x69\x6e\x65\x6a\ \x6f\x69\x6e\x3d\x22\x6d\x69\x74\x65\x72\x22\x2f\x3e\x3c\x2f\x73\ \x76\x67\x3e\ +\x00\x00\x02\xf5\ +\x3c\ +\x3f\x78\x6d\x6c\x20\x76\x65\x72\x73\x69\x6f\x6e\x3d\x22\x31\x2e\ +\x30\x22\x20\x65\x6e\x63\x6f\x64\x69\x6e\x67\x3d\x22\x55\x54\x46\ +\x2d\x38\x22\x3f\x3e\x3c\x73\x76\x67\x20\x77\x69\x64\x74\x68\x3d\ +\x22\x33\x32\x22\x20\x68\x65\x69\x67\x68\x74\x3d\x22\x33\x32\x22\ +\x20\x76\x69\x65\x77\x42\x6f\x78\x3d\x22\x30\x20\x30\x20\x34\x38\ +\x20\x34\x38\x22\x20\x66\x69\x6c\x6c\x3d\x22\x6e\x6f\x6e\x65\x22\ +\x20\x78\x6d\x6c\x6e\x73\x3d\x22\x68\x74\x74\x70\x3a\x2f\x2f\x77\ +\x77\x77\x2e\x77\x33\x2e\x6f\x72\x67\x2f\x32\x30\x30\x30\x2f\x73\ +\x76\x67\x22\x3e\x3c\x70\x61\x74\x68\x20\x64\x3d\x22\x4d\x34\x33\ +\x20\x32\x33\x56\x31\x34\x43\x34\x33\x20\x31\x32\x2e\x38\x39\x35\ +\x34\x20\x34\x32\x2e\x31\x30\x34\x36\x20\x31\x32\x20\x34\x31\x20\ +\x31\x32\x48\x32\x34\x4c\x31\x39\x20\x36\x48\x37\x43\x35\x2e\x38\ +\x39\x35\x34\x33\x20\x36\x20\x35\x20\x36\x2e\x38\x39\x35\x34\x33\ +\x20\x35\x20\x38\x56\x34\x30\x43\x35\x20\x34\x31\x2e\x31\x30\x34\ +\x36\x20\x35\x2e\x38\x39\x35\x34\x33\x20\x34\x32\x20\x37\x20\x34\ +\x32\x48\x32\x32\x22\x20\x73\x74\x72\x6f\x6b\x65\x3d\x22\x23\x30\ +\x30\x36\x34\x66\x66\x22\x20\x73\x74\x72\x6f\x6b\x65\x2d\x77\x69\ +\x64\x74\x68\x3d\x22\x33\x22\x20\x73\x74\x72\x6f\x6b\x65\x2d\x6c\ +\x69\x6e\x65\x63\x61\x70\x3d\x22\x73\x71\x75\x61\x72\x65\x22\x20\ +\x73\x74\x72\x6f\x6b\x65\x2d\x6c\x69\x6e\x65\x6a\x6f\x69\x6e\x3d\ +\x22\x6d\x69\x74\x65\x72\x22\x2f\x3e\x3c\x70\x61\x74\x68\x20\x64\ +\x3d\x22\x4d\x32\x37\x20\x33\x31\x4c\x34\x31\x20\x33\x31\x22\x20\ +\x73\x74\x72\x6f\x6b\x65\x3d\x22\x23\x30\x30\x36\x34\x66\x66\x22\ +\x20\x73\x74\x72\x6f\x6b\x65\x2d\x77\x69\x64\x74\x68\x3d\x22\x33\ +\x22\x20\x73\x74\x72\x6f\x6b\x65\x2d\x6c\x69\x6e\x65\x63\x61\x70\ +\x3d\x22\x73\x71\x75\x61\x72\x65\x22\x20\x73\x74\x72\x6f\x6b\x65\ +\x2d\x6c\x69\x6e\x65\x6a\x6f\x69\x6e\x3d\x22\x6d\x69\x74\x65\x72\ +\x22\x2f\x3e\x3c\x70\x61\x74\x68\x20\x64\x3d\x22\x4d\x32\x37\x20\ +\x33\x37\x48\x34\x31\x22\x20\x73\x74\x72\x6f\x6b\x65\x3d\x22\x23\ +\x30\x30\x36\x34\x66\x66\x22\x20\x73\x74\x72\x6f\x6b\x65\x2d\x77\ +\x69\x64\x74\x68\x3d\x22\x33\x22\x20\x73\x74\x72\x6f\x6b\x65\x2d\ +\x6c\x69\x6e\x65\x63\x61\x70\x3d\x22\x73\x71\x75\x61\x72\x65\x22\ +\x20\x73\x74\x72\x6f\x6b\x65\x2d\x6c\x69\x6e\x65\x6a\x6f\x69\x6e\ +\x3d\x22\x6d\x69\x74\x65\x72\x22\x2f\x3e\x3c\x70\x61\x74\x68\x20\ +\x64\x3d\x22\x4d\x34\x31\x20\x33\x31\x4c\x33\x36\x20\x32\x36\x22\ +\x20\x73\x74\x72\x6f\x6b\x65\x3d\x22\x23\x30\x30\x36\x34\x66\x66\ +\x22\x20\x73\x74\x72\x6f\x6b\x65\x2d\x77\x69\x64\x74\x68\x3d\x22\ +\x33\x22\x20\x73\x74\x72\x6f\x6b\x65\x2d\x6c\x69\x6e\x65\x63\x61\ +\x70\x3d\x22\x73\x71\x75\x61\x72\x65\x22\x20\x73\x74\x72\x6f\x6b\ +\x65\x2d\x6c\x69\x6e\x65\x6a\x6f\x69\x6e\x3d\x22\x6d\x69\x74\x65\ +\x72\x22\x2f\x3e\x3c\x70\x61\x74\x68\x20\x64\x3d\x22\x4d\x33\x32\ +\x20\x34\x32\x4c\x32\x37\x20\x33\x37\x22\x20\x73\x74\x72\x6f\x6b\ 
+\x65\x3d\x22\x23\x30\x30\x36\x34\x66\x66\x22\x20\x73\x74\x72\x6f\ +\x6b\x65\x2d\x77\x69\x64\x74\x68\x3d\x22\x33\x22\x20\x73\x74\x72\ +\x6f\x6b\x65\x2d\x6c\x69\x6e\x65\x63\x61\x70\x3d\x22\x73\x71\x75\ +\x61\x72\x65\x22\x20\x73\x74\x72\x6f\x6b\x65\x2d\x6c\x69\x6e\x65\ +\x6a\x6f\x69\x6e\x3d\x22\x6d\x69\x74\x65\x72\x22\x2f\x3e\x3c\x2f\ +\x73\x76\x67\x3e\ +\x00\x00\x03\x69\ +\x3c\ +\x3f\x78\x6d\x6c\x20\x76\x65\x72\x73\x69\x6f\x6e\x3d\x22\x31\x2e\ +\x30\x22\x20\x65\x6e\x63\x6f\x64\x69\x6e\x67\x3d\x22\x55\x54\x46\ +\x2d\x38\x22\x3f\x3e\x3c\x73\x76\x67\x20\x77\x69\x64\x74\x68\x3d\ +\x22\x33\x32\x22\x20\x68\x65\x69\x67\x68\x74\x3d\x22\x33\x32\x22\ +\x20\x76\x69\x65\x77\x42\x6f\x78\x3d\x22\x30\x20\x30\x20\x34\x38\ +\x20\x34\x38\x22\x20\x66\x69\x6c\x6c\x3d\x22\x6e\x6f\x6e\x65\x22\ +\x20\x78\x6d\x6c\x6e\x73\x3d\x22\x68\x74\x74\x70\x3a\x2f\x2f\x77\ +\x77\x77\x2e\x77\x33\x2e\x6f\x72\x67\x2f\x32\x30\x30\x30\x2f\x73\ +\x76\x67\x22\x3e\x3c\x70\x61\x74\x68\x20\x64\x3d\x22\x4d\x32\x38\ +\x2e\x32\x38\x35\x37\x20\x33\x37\x48\x33\x39\x2e\x37\x31\x34\x33\ +\x4d\x34\x32\x20\x34\x32\x4c\x33\x39\x2e\x37\x31\x34\x33\x20\x33\ +\x37\x4c\x34\x32\x20\x34\x32\x5a\x4d\x32\x36\x20\x34\x32\x4c\x32\ +\x38\x2e\x32\x38\x35\x37\x20\x33\x37\x4c\x32\x36\x20\x34\x32\x5a\ +\x4d\x32\x38\x2e\x32\x38\x35\x37\x20\x33\x37\x4c\x33\x34\x20\x32\ +\x34\x4c\x33\x39\x2e\x37\x31\x34\x33\x20\x33\x37\x48\x32\x38\x2e\ +\x32\x38\x35\x37\x5a\x22\x20\x73\x74\x72\x6f\x6b\x65\x3d\x22\x23\ +\x30\x30\x36\x34\x66\x66\x22\x20\x73\x74\x72\x6f\x6b\x65\x2d\x77\ +\x69\x64\x74\x68\x3d\x22\x33\x22\x20\x73\x74\x72\x6f\x6b\x65\x2d\ +\x6c\x69\x6e\x65\x63\x61\x70\x3d\x22\x73\x71\x75\x61\x72\x65\x22\ +\x20\x73\x74\x72\x6f\x6b\x65\x2d\x6c\x69\x6e\x65\x6a\x6f\x69\x6e\ +\x3d\x22\x6d\x69\x74\x65\x72\x22\x2f\x3e\x3c\x70\x61\x74\x68\x20\ +\x64\x3d\x22\x4d\x31\x36\x20\x36\x4c\x31\x37\x20\x39\x22\x20\x73\ +\x74\x72\x6f\x6b\x65\x3d\x22\x23\x30\x30\x36\x34\x66\x66\x22\x20\ +\x73\x74\x72\x6f\x6b\x65\x2d\x77\x69\x64\x74\x68\x3d\x22\x33\x22\ +\x20\x73\x74\x72\x6f\x6b\x65\x2d\x6c\x69\x6e\x65\x63\x61\x70\x3d\ +\x22\x73\x71\x75\x61\x72\x65\x22\x20\x73\x74\x72\x6f\x6b\x65\x2d\ +\x6c\x69\x6e\x65\x6a\x6f\x69\x6e\x3d\x22\x6d\x69\x74\x65\x72\x22\ +\x2f\x3e\x3c\x70\x61\x74\x68\x20\x64\x3d\x22\x4d\x36\x20\x31\x31\ +\x48\x32\x38\x22\x20\x73\x74\x72\x6f\x6b\x65\x3d\x22\x23\x30\x30\ +\x36\x34\x66\x66\x22\x20\x73\x74\x72\x6f\x6b\x65\x2d\x77\x69\x64\ +\x74\x68\x3d\x22\x33\x22\x20\x73\x74\x72\x6f\x6b\x65\x2d\x6c\x69\ +\x6e\x65\x63\x61\x70\x3d\x22\x73\x71\x75\x61\x72\x65\x22\x20\x73\ +\x74\x72\x6f\x6b\x65\x2d\x6c\x69\x6e\x65\x6a\x6f\x69\x6e\x3d\x22\ +\x6d\x69\x74\x65\x72\x22\x2f\x3e\x3c\x70\x61\x74\x68\x20\x64\x3d\ +\x22\x4d\x31\x30\x20\x31\x36\x43\x31\x30\x20\x31\x36\x20\x31\x31\ +\x2e\x37\x38\x39\x35\x20\x32\x32\x2e\x32\x36\x30\x39\x20\x31\x36\ +\x2e\x32\x36\x33\x32\x20\x32\x35\x2e\x37\x33\x39\x31\x43\x32\x30\ +\x2e\x37\x33\x36\x38\x20\x32\x39\x2e\x32\x31\x37\x34\x20\x32\x38\ +\x20\x33\x32\x20\x32\x38\x20\x33\x32\x22\x20\x73\x74\x72\x6f\x6b\ +\x65\x3d\x22\x23\x30\x30\x36\x34\x66\x66\x22\x20\x73\x74\x72\x6f\ +\x6b\x65\x2d\x77\x69\x64\x74\x68\x3d\x22\x33\x22\x20\x73\x74\x72\ +\x6f\x6b\x65\x2d\x6c\x69\x6e\x65\x63\x61\x70\x3d\x22\x73\x71\x75\ +\x61\x72\x65\x22\x20\x73\x74\x72\x6f\x6b\x65\x2d\x6c\x69\x6e\x65\ +\x6a\x6f\x69\x6e\x3d\x22\x6d\x69\x74\x65\x72\x22\x2f\x3e\x3c\x70\ +\x61\x74\x68\x20\x64\x3d\x22\x4d\x32\x34\x20\x31\x31\x43\x32\x34\ +\x20\x31\x31\x20\x32\x32\x2e\x32\x31\x30\x35\x20\x31\x39\x2e\x32\ +\x31\x37\x34\x20\x31\x37\x2e\x37\x33\x36\x38\x20\x32\x33\x2e\x37\ 
+\x38\x32\x36\x43\x31\x33\x2e\x32\x36\x33\x32\x20\x32\x38\x2e\x33\ +\x34\x37\x38\x20\x36\x20\x33\x32\x20\x36\x20\x33\x32\x22\x20\x73\ +\x74\x72\x6f\x6b\x65\x3d\x22\x23\x30\x30\x36\x34\x66\x66\x22\x20\ +\x73\x74\x72\x6f\x6b\x65\x2d\x77\x69\x64\x74\x68\x3d\x22\x33\x22\ +\x20\x73\x74\x72\x6f\x6b\x65\x2d\x6c\x69\x6e\x65\x63\x61\x70\x3d\ +\x22\x73\x71\x75\x61\x72\x65\x22\x20\x73\x74\x72\x6f\x6b\x65\x2d\ +\x6c\x69\x6e\x65\x6a\x6f\x69\x6e\x3d\x22\x6d\x69\x74\x65\x72\x22\ +\x2f\x3e\x3c\x2f\x73\x76\x67\x3e\ +\x00\x00\x07\x34\ +\x89\ +\x50\x4e\x47\x0d\x0a\x1a\x0a\x00\x00\x00\x0d\x49\x48\x44\x52\x00\ +\x00\x00\x20\x00\x00\x00\x20\x08\x06\x00\x00\x00\x73\x7a\x7a\xf4\ +\x00\x00\x06\xfb\x49\x44\x41\x54\x78\x9c\xbd\x97\xd9\x6f\x54\xd7\ +\x1d\xc7\x3f\xbf\x73\x97\x99\xb9\xb3\xda\x63\x83\xed\x02\x9e\x00\ +\x06\x1c\x11\x50\x51\xa5\x56\x50\x4a\x89\xca\xd2\x54\x7d\x49\xab\ +\xfc\x01\x7d\x05\xe5\x21\xad\xd4\x4d\x91\x92\xb4\x59\xa4\xaa\x55\ +\x1f\x4b\x94\xb6\xa9\xaa\x44\xa9\xd4\x2a\x0f\x25\x65\xab\x4a\xa5\ +\xaa\x25\x11\x09\x38\x1b\x60\x3b\x08\x1c\xcc\x18\xef\x9e\x31\xe3\ +\x99\xb9\xcb\x39\x7d\x98\xb1\x59\x6c\xc0\xa6\x52\xcf\xc3\x79\xb8\ +\xcb\xf9\x7d\xee\xef\xf7\xfd\x2d\x57\x8a\xc5\xa2\xe1\x21\x97\x34\ +\x37\xf3\xd0\x27\x80\xfa\x5f\x8c\x6b\x84\x20\x02\x25\xff\x67\x00\ +\x91\xc6\x76\xb4\x7f\x8a\x23\x67\x8b\x94\x7d\x83\xf5\x90\x14\x2b\ +\x06\x10\x01\x41\x38\x36\x38\xcd\xf1\x81\x51\x3e\x1a\x99\xe1\x8d\ +\xbe\x1b\x94\xeb\xfa\xa1\x20\x56\x04\x20\xcd\xfd\x9d\xc1\x69\x8e\ +\x5d\x1a\xc3\xb3\x2d\x32\x71\x97\x8b\x63\xb3\xbc\xf1\xe1\x28\xb3\ +\xbe\x5e\x71\x38\x96\x0d\x20\x02\x22\xc2\xdf\x9a\xc6\x6d\x4b\x9a\ +\x02\x34\x58\x22\x7c\x72\xa3\xcc\x1f\xfb\x46\xb9\x19\xac\x2c\x1c\ +\xf6\x72\x8d\x1b\x23\xd4\x35\x04\xda\x60\xa9\x79\x6f\x40\x2a\xe6\ +\x90\xf3\x62\x94\xab\x3e\xb5\x50\x13\x68\xa1\x1e\x1a\x62\xb6\x10\ +\xe9\x07\xa7\xc7\x03\x01\x54\x53\x70\xaf\x9d\x1d\xe6\x7a\xb9\xc6\ +\xf7\xbf\xb6\x81\xb4\xeb\x70\xae\x58\x26\xef\xc5\xf8\x42\x2e\xc1\ +\xae\x42\x0b\x7e\x18\x62\x99\x88\xb7\xce\x5f\xa3\xaf\x58\xe2\xa5\ +\x83\x5b\xc8\x7b\x0e\xe1\x03\x20\xee\x0b\xa0\x9a\x6e\x7f\xfd\x83\ +\x22\x6f\x9e\xbb\x86\x12\x21\xe9\xda\x1c\xde\x59\x60\x4b\x47\x8e\ +\x44\xcc\xa1\xd5\x0e\x41\xfb\x88\x6b\xd0\x58\x0c\x95\x7c\xfe\xfa\ +\x71\x91\x30\xd2\xbc\xf4\xcd\x5e\x56\x25\x1d\xa2\xfb\x30\xdc\x53\ +\x03\xf3\xc6\x7f\xff\x7e\x91\xb7\xfa\xae\xd3\x99\x49\xe0\x58\xc2\ +\xb1\x8b\x23\xfc\xfa\x5f\x57\x68\x75\x23\xda\x9c\x00\x1d\x05\x68\ +\x63\x88\x34\x58\x68\x9e\xde\xbd\x91\x6d\x6b\xf2\xbc\x37\x34\xcd\ +\xd0\x4c\x1d\xc4\x5a\xb9\x07\xe6\xd5\xfe\x87\x73\x45\x5e\x3f\x3b\ +\xc4\xea\x74\x1c\x43\x43\x70\x18\x38\x7e\x69\x04\x2f\x66\x73\xe8\ +\x2b\x6b\xb1\x00\xd3\x7c\x27\xd2\x86\x42\xab\xc7\xe1\x3d\x5b\x30\ +\x61\x95\xad\x5d\x39\x2e\xcc\x68\xda\xe3\x8a\xb6\x98\x66\xa9\x68\ +\x2c\x02\x10\xc0\xb2\x14\x6f\xf6\xdd\xe0\xd5\x33\x57\xc9\x26\x6c\ +\x5c\xdb\x62\x6c\x76\x0e\x03\x28\x05\xf5\x40\x33\x5b\x8b\x40\x14\ +\x98\x08\x03\xd4\xb5\x10\xb7\xa0\x5a\xad\xf2\xad\x9e\x2c\x35\x9d\ +\xe5\x72\xc9\x50\xf1\x35\x73\x01\x28\x51\xb4\xb8\x7a\x51\xd9\x56\ +\x77\x1b\x17\x11\x8c\x81\xae\x6c\x9c\x8e\x6c\x02\x85\x30\x71\xb3\ +\x06\x40\xd2\x75\xf0\x43\xc3\xd6\x8e\x1c\xdf\xfb\x52\x17\x0e\x1a\ +\x03\x58\x4a\xf1\xf6\xa7\xa3\xfc\xee\x83\x22\x4a\x84\x5a\x64\xb8\ +\x5c\xd2\x54\x83\x08\xab\x79\xde\xd5\xb2\x66\xda\x57\xdc\x9d\xa0\ +\x0b\x00\x4a\x1a\x07\x1d\x79\xef\x73\x5e\x39\x7d\x99\x3d\x85\x16\ +\x7e\xb4\x77\x13\xe9\x84\x4b\xc5\x0f\x50\x4a\xa8\x06\x11\x8f\x75\ +\xb5\xf0\xec\xbe\xcd\xac\x6f\x4b\xa3\x9b\xef\x5a\x96\xc5\x81\x9e\ +\x3c\x8f\xad\x4a\x12\x18\x61\x2a\x70\x88\xf4\xad\x60\xaa\xdb\x21\ 
+\x02\xd5\x28\xe5\xb7\x87\x40\x04\x94\x52\xbc\xfa\xee\x10\xbf\x3c\ +\x3d\x88\xa5\x04\x2f\x66\xf3\xcc\xee\x02\x87\x76\x16\x78\xf9\x1f\ +\x03\xcc\xd6\x02\x1e\xed\xc8\xf1\xd3\xc7\x37\xd0\xdd\x12\x27\x52\ +\x36\x50\x07\xc0\x88\xa2\xb3\x25\xcd\xea\x94\x8b\x1f\x69\xf2\x09\ +\x8b\x50\xbb\x8c\xce\xd6\x40\x64\xc1\xb3\xda\x18\xae\x96\x35\x64\ +\x14\x2d\x4e\xc3\x7b\xf6\xbc\xe0\x7e\x73\x66\x88\x5f\x9c\x1e\xc0\ +\xb1\x14\x49\xd7\xe6\x4f\xe7\xae\xa1\xb5\xe1\x99\xdd\x05\x82\x3d\ +\x3d\x1c\xef\x9f\xe0\xd0\xce\x75\xcc\xd4\x0d\x93\xc5\x0a\xdb\x3a\ +\x74\x43\x94\x40\xe8\xd7\x51\xb1\x38\xa1\x06\x8c\xc6\x26\xc4\x20\ +\x8b\xca\xb2\x6a\x42\x5c\x29\x6b\xac\xac\x22\x63\xeb\x86\x07\x42\ +\x6d\x18\x98\x98\x23\xd4\x86\xa4\xab\xc8\xc6\x5d\x26\x2b\x35\x7e\ +\xfb\xee\x15\x8c\x31\x1c\xde\x55\x60\x57\x77\x8e\xfe\xc9\x3a\xa7\ +\x06\xa7\x30\x18\xe2\x4e\x3b\x9b\x5a\x9d\x85\x6a\xe7\xd7\x6b\x0b\ +\x4a\x72\x88\x50\x34\xb2\x43\x35\x23\x31\x2f\x3e\x11\xc1\xb3\x15\ +\x8e\x6a\x5c\x50\x06\x70\x94\xf0\xdc\xbe\x1e\xbe\xb3\x7d\x2d\xf5\ +\x50\xe3\x47\x11\x31\xa7\x91\x60\xe7\x8b\x65\x6e\xfa\x9a\xc1\xa9\ +\x1a\x7f\xff\x6c\x8a\x48\x6b\xea\x41\xc4\x85\xf1\x1a\xb6\x1b\xbf\ +\x23\x9e\x8e\x12\xae\x94\x7c\x5e\x3b\x3b\x4c\xce\x0e\xe9\x4a\x3b\ +\x0b\xf7\x45\x40\x1b\xf0\x1c\x45\x21\x6d\x48\x28\x7d\x0b\x50\x1b\ +\x43\x26\x66\xf1\xfc\xfe\x4d\x3c\xf5\xc5\xb5\x44\x06\x6e\xd6\x02\ +\x7a\x57\xe7\x78\x6e\xdf\x66\x6e\x54\x42\x4e\x0e\x4e\xe3\x47\x51\ +\x43\x74\x4a\x18\x9e\xa9\x70\xbd\x02\xb6\x13\x6f\xe8\x00\x98\xac\ +\x43\x4d\x5b\x1c\xbb\x38\xca\xf3\x27\x2e\xd1\xea\x44\x74\xa4\x6d\ +\x84\x86\x07\x92\x6e\xc3\x78\xbc\x69\x7c\x41\x84\xf3\x61\x48\xb9\ +\x8a\x1f\x3f\xbe\x11\x5f\x1b\x06\xc6\x66\x79\xf1\x89\x5e\x02\x23\ +\x9c\x18\x98\x24\x08\x23\x94\x08\x8e\xa5\xb0\x2d\xa1\x54\xf5\x39\ +\xfa\xf1\x30\x4f\x6e\x5f\xc3\x2a\xc7\x22\x34\x50\x0d\x2c\xde\xbf\ +\x36\x41\xa9\x5a\xe7\xed\xcf\x27\xc8\x25\x5c\x7e\xb0\x77\x03\x26\ +\x65\x53\xaa\x6b\xba\x53\x86\xb8\xdc\x32\x7e\x07\x00\x34\x2a\x59\ +\xc2\x16\x7e\xb8\x67\x3d\x73\x81\xa6\xec\xc3\xf1\x81\x49\xfc\xa6\ +\x71\x03\x44\xc6\xb0\x2e\x9b\xe4\x91\x7c\x8a\x52\xd5\x27\xf4\xab\ +\x9c\x1c\x9a\x61\x6d\x4b\x8a\xf6\x5c\x0a\xad\x35\x63\xe5\x2a\x71\ +\x47\x71\xf4\xc2\x08\x22\xf0\xf4\x57\xd7\xd3\x9e\xb1\xb0\xcc\x9d\ +\xc6\x17\x01\xcc\x43\xb4\xc4\x6d\x5a\x12\xc2\xa7\x97\x67\xa9\x06\ +\x11\xb6\x9a\xcf\x15\xb0\x95\x50\xc8\x67\xd8\xd1\x95\x24\x0a\xaa\ +\x68\xad\x39\xd1\x3f\x4e\xdf\xf5\x41\xbe\xde\xb3\x9a\x81\xb1\x32\ +\x91\x36\x64\xe2\x2e\x4a\x09\xe7\x86\x4b\x94\x6b\x21\x19\x57\x08\ +\x97\x53\x8a\x69\x7e\xa5\x60\xd8\xdd\x9d\xa4\x1a\x44\xf4\x8f\x57\ +\x40\x1a\x7d\x20\xe9\x3a\x24\x5d\x81\xa8\x86\x32\x11\x96\x52\xbc\ +\xb0\x7f\x13\xe7\x47\x2a\x1c\x39\x73\x85\x7f\x5f\x1e\x43\x04\x82\ +\xc8\xb0\x2e\x9b\xe0\x67\x07\x7b\x59\x93\xb9\x77\x5b\xbe\x67\x37\ +\x34\x06\xe2\x16\x1c\xd8\x98\x61\x6b\x47\x9a\x50\x1b\x82\xc8\x90\ +\x8a\xd9\x74\xa7\x05\xc7\xb2\x40\x2c\x22\xad\xb1\xc5\xb0\x6b\x5d\ +\x8a\x23\x4f\xed\xe0\x57\xdf\xfd\x32\x31\xdb\x22\x1b\xb7\x79\xe1\ +\xc0\x16\xb6\x75\x78\xf7\x9d\x09\xee\x3b\x0f\x68\x03\x31\x0b\xf6\ +\x16\x52\x54\x83\x90\xa9\x6a\xc8\xbe\x8d\x39\xfe\xf9\xd9\x38\x99\ +\xa4\xc7\xfe\x8d\x79\xca\x95\x4a\xa3\x19\x85\x1a\x5b\x05\x3c\xb9\ +\x75\x35\x9e\xb5\x83\xd6\xb8\x62\x7b\x47\x8a\x5a\x10\xa1\x94\x5a\ +\x28\x5a\x77\x2f\x59\xce\x8f\x89\x12\x98\x0b\x0d\xbe\x16\xfe\x73\ +\x75\x8a\x9f\xbc\xf3\x09\x9e\x63\xf1\xca\xb7\xb7\xb1\xf7\x91\x2c\ +\x41\xa4\x17\x9e\x15\xc0\xb6\x14\x88\x30\x3a\x36\xce\xe4\xc4\x24\ +\x6d\xed\x6d\xe4\xdb\xf2\x60\x40\x6b\x8d\x8e\x34\x06\x83\x6d\xdb\ +\xcb\x9b\x09\xb5\x01\xcf\x16\x62\x46\x38\x35\x30\x4e\xa9\xea\xe3\ 
+\x87\x8a\x9f\x9f\xbc\x88\x1c\x78\x94\x3d\x85\x0c\x61\x13\xc2\x00\ +\x41\xa4\x51\x4a\x51\xaf\xd5\x71\x5d\x97\x99\xe9\x19\x6c\xdb\x26\ +\x0c\x43\x8c\x36\x4c\x4f\x4f\x03\xb0\xae\x7b\xdd\xf2\xa7\x62\x6d\ +\x1a\x8d\xe3\xd9\x6f\xf4\x70\xb0\xb7\x13\x3f\xd4\x0c\x4d\xcd\xf2\ +\x97\x8f\x46\xb8\x51\x65\xc9\x49\x58\x6b\x4d\x18\x86\x64\xb3\x59\ +\xca\xa5\x32\xa5\x52\x89\x89\xf1\x09\x8c\x31\x04\x41\xc0\x40\xff\ +\xc0\xca\xfe\x0b\x22\x63\x68\x4b\x58\xbc\xfc\x44\x2f\xfb\xb6\x74\ +\x11\x6a\xc3\xa9\x8b\xc3\xfc\xf9\xc3\x22\xb2\xc4\xe8\xe5\xba\x2e\ +\x9d\x5d\x9d\x78\x9e\xb7\x00\x64\x3b\x36\x9e\xe7\xe1\x79\x1e\x8e\ +\xe3\x2c\x2f\x04\xb7\xaf\x50\x1b\xf2\x71\x8b\x17\x0f\x6e\x26\xd4\ +\x9a\xfe\xb1\x59\x76\x74\x7a\x18\x13\x2d\xfa\xfa\xd6\x7c\x2b\x4a\ +\x29\x2c\xcb\x22\x0c\x43\xfc\xc0\x27\x9d\x4e\x23\xb7\x35\x90\x65\ +\x89\x70\xa9\x65\x2b\x61\xac\x12\x32\x55\x0d\xe8\x6d\x4f\x10\x2c\ +\x31\xfa\x8a\x08\x41\x10\x30\x37\x37\x47\x32\x99\xc4\x71\x9c\x45\ +\xd9\xf0\xd0\x00\x00\x96\x08\x22\x2c\x99\xe7\x22\xc2\xdc\xdc\x1c\ +\xc5\xeb\x45\x5c\xd7\xc5\x75\x5d\x3a\x3a\x3b\x16\x01\xfc\x17\x8c\ +\x9f\x4a\xdc\x4b\xe3\x48\xf3\x00\x00\x00\x00\x49\x45\x4e\x44\xae\ +\x42\x60\x82\ +\x00\x00\x03\x43\ +\x3c\ +\x3f\x78\x6d\x6c\x20\x76\x65\x72\x73\x69\x6f\x6e\x3d\x22\x31\x2e\ +\x30\x22\x20\x65\x6e\x63\x6f\x64\x69\x6e\x67\x3d\x22\x55\x54\x46\ +\x2d\x38\x22\x3f\x3e\x3c\x73\x76\x67\x20\x77\x69\x64\x74\x68\x3d\ +\x22\x33\x32\x22\x20\x68\x65\x69\x67\x68\x74\x3d\x22\x33\x32\x22\ +\x20\x76\x69\x65\x77\x42\x6f\x78\x3d\x22\x30\x20\x30\x20\x34\x38\ +\x20\x34\x38\x22\x20\x66\x69\x6c\x6c\x3d\x22\x6e\x6f\x6e\x65\x22\ +\x20\x78\x6d\x6c\x6e\x73\x3d\x22\x68\x74\x74\x70\x3a\x2f\x2f\x77\ +\x77\x77\x2e\x77\x33\x2e\x6f\x72\x67\x2f\x32\x30\x30\x30\x2f\x73\ +\x76\x67\x22\x3e\x3c\x70\x61\x74\x68\x20\x66\x69\x6c\x6c\x2d\x72\ +\x75\x6c\x65\x3d\x22\x65\x76\x65\x6e\x6f\x64\x64\x22\x20\x63\x6c\ +\x69\x70\x2d\x72\x75\x6c\x65\x3d\x22\x65\x76\x65\x6e\x6f\x64\x64\ +\x22\x20\x64\x3d\x22\x4d\x35\x20\x31\x30\x43\x35\x20\x38\x2e\x38\ +\x39\x35\x34\x33\x20\x35\x2e\x38\x39\x35\x34\x33\x20\x38\x20\x37\ +\x20\x38\x4c\x34\x31\x20\x38\x43\x34\x32\x2e\x31\x30\x34\x36\x20\ +\x38\x20\x34\x33\x20\x38\x2e\x38\x39\x35\x34\x33\x20\x34\x33\x20\ +\x31\x30\x56\x33\x38\x43\x34\x33\x20\x33\x39\x2e\x31\x30\x34\x36\ +\x20\x34\x32\x2e\x31\x30\x34\x36\x20\x34\x30\x20\x34\x31\x20\x34\ +\x30\x48\x37\x43\x35\x2e\x38\x39\x35\x34\x33\x20\x34\x30\x20\x35\ +\x20\x33\x39\x2e\x31\x30\x34\x36\x20\x35\x20\x33\x38\x56\x31\x30\ +\x5a\x22\x20\x73\x74\x72\x6f\x6b\x65\x3d\x22\x23\x30\x30\x36\x34\ +\x66\x66\x22\x20\x73\x74\x72\x6f\x6b\x65\x2d\x77\x69\x64\x74\x68\ +\x3d\x22\x33\x22\x20\x73\x74\x72\x6f\x6b\x65\x2d\x6c\x69\x6e\x65\ +\x63\x61\x70\x3d\x22\x73\x71\x75\x61\x72\x65\x22\x20\x73\x74\x72\ +\x6f\x6b\x65\x2d\x6c\x69\x6e\x65\x6a\x6f\x69\x6e\x3d\x22\x6d\x69\ +\x74\x65\x72\x22\x2f\x3e\x3c\x70\x61\x74\x68\x20\x66\x69\x6c\x6c\ +\x2d\x72\x75\x6c\x65\x3d\x22\x65\x76\x65\x6e\x6f\x64\x64\x22\x20\ +\x63\x6c\x69\x70\x2d\x72\x75\x6c\x65\x3d\x22\x65\x76\x65\x6e\x6f\ +\x64\x64\x22\x20\x64\x3d\x22\x4d\x31\x34\x2e\x35\x20\x31\x38\x43\ +\x31\x35\x2e\x33\x32\x38\x34\x20\x31\x38\x20\x31\x36\x20\x31\x37\ +\x2e\x33\x32\x38\x34\x20\x31\x36\x20\x31\x36\x2e\x35\x43\x31\x36\ +\x20\x31\x35\x2e\x36\x37\x31\x36\x20\x31\x35\x2e\x33\x32\x38\x34\ +\x20\x31\x35\x20\x31\x34\x2e\x35\x20\x31\x35\x43\x31\x33\x2e\x36\ +\x37\x31\x36\x20\x31\x35\x20\x31\x33\x20\x31\x35\x2e\x36\x37\x31\ +\x36\x20\x31\x33\x20\x31\x36\x2e\x35\x43\x31\x33\x20\x31\x37\x2e\ +\x33\x32\x38\x34\x20\x31\x33\x2e\x36\x37\x31\x36\x20\x31\x38\x20\ 
+\x31\x34\x2e\x35\x20\x31\x38\x5a\x22\x20\x73\x74\x72\x6f\x6b\x65\ +\x3d\x22\x23\x30\x30\x36\x34\x66\x66\x22\x20\x73\x74\x72\x6f\x6b\ +\x65\x2d\x77\x69\x64\x74\x68\x3d\x22\x33\x22\x20\x73\x74\x72\x6f\ +\x6b\x65\x2d\x6c\x69\x6e\x65\x63\x61\x70\x3d\x22\x73\x71\x75\x61\ +\x72\x65\x22\x20\x73\x74\x72\x6f\x6b\x65\x2d\x6c\x69\x6e\x65\x6a\ +\x6f\x69\x6e\x3d\x22\x6d\x69\x74\x65\x72\x22\x2f\x3e\x3c\x70\x61\ +\x74\x68\x20\x64\x3d\x22\x4d\x31\x35\x20\x32\x34\x4c\x32\x30\x20\ +\x32\x38\x4c\x32\x36\x20\x32\x31\x4c\x34\x33\x20\x33\x34\x56\x33\ +\x38\x43\x34\x33\x20\x33\x39\x2e\x31\x30\x34\x36\x20\x34\x32\x2e\ +\x31\x30\x34\x36\x20\x34\x30\x20\x34\x31\x20\x34\x30\x48\x37\x43\ +\x35\x2e\x38\x39\x35\x34\x33\x20\x34\x30\x20\x35\x20\x33\x39\x2e\ +\x31\x30\x34\x36\x20\x35\x20\x33\x38\x56\x33\x34\x4c\x31\x35\x20\ +\x32\x34\x5a\x22\x20\x66\x69\x6c\x6c\x3d\x22\x6e\x6f\x6e\x65\x22\ +\x20\x73\x74\x72\x6f\x6b\x65\x3d\x22\x23\x30\x30\x36\x34\x66\x66\ +\x22\x20\x73\x74\x72\x6f\x6b\x65\x2d\x77\x69\x64\x74\x68\x3d\x22\ +\x33\x22\x20\x73\x74\x72\x6f\x6b\x65\x2d\x6c\x69\x6e\x65\x6a\x6f\ +\x69\x6e\x3d\x22\x6d\x69\x74\x65\x72\x22\x2f\x3e\x3c\x2f\x73\x76\ +\x67\x3e\ +\x00\x00\x01\xd2\ +\x3c\ +\x3f\x78\x6d\x6c\x20\x76\x65\x72\x73\x69\x6f\x6e\x3d\x22\x31\x2e\ +\x30\x22\x20\x65\x6e\x63\x6f\x64\x69\x6e\x67\x3d\x22\x55\x54\x46\ +\x2d\x38\x22\x3f\x3e\x3c\x73\x76\x67\x20\x77\x69\x64\x74\x68\x3d\ +\x22\x33\x32\x22\x20\x68\x65\x69\x67\x68\x74\x3d\x22\x33\x32\x22\ +\x20\x76\x69\x65\x77\x42\x6f\x78\x3d\x22\x30\x20\x30\x20\x34\x38\ +\x20\x34\x38\x22\x20\x66\x69\x6c\x6c\x3d\x22\x6e\x6f\x6e\x65\x22\ +\x20\x78\x6d\x6c\x6e\x73\x3d\x22\x68\x74\x74\x70\x3a\x2f\x2f\x77\ +\x77\x77\x2e\x77\x33\x2e\x6f\x72\x67\x2f\x32\x30\x30\x30\x2f\x73\ +\x76\x67\x22\x3e\x3c\x70\x61\x74\x68\x20\x64\x3d\x22\x4d\x32\x34\ +\x2e\x30\x30\x38\x33\x20\x33\x33\x2e\x38\x39\x39\x35\x56\x36\x22\ +\x20\x73\x74\x72\x6f\x6b\x65\x3d\x22\x23\x30\x30\x36\x34\x66\x66\ +\x22\x20\x73\x74\x72\x6f\x6b\x65\x2d\x77\x69\x64\x74\x68\x3d\x22\ +\x33\x22\x20\x73\x74\x72\x6f\x6b\x65\x2d\x6c\x69\x6e\x65\x63\x61\ +\x70\x3d\x22\x73\x71\x75\x61\x72\x65\x22\x20\x73\x74\x72\x6f\x6b\ +\x65\x2d\x6c\x69\x6e\x65\x6a\x6f\x69\x6e\x3d\x22\x6d\x69\x74\x65\ +\x72\x22\x2f\x3e\x3c\x70\x61\x74\x68\x20\x64\x3d\x22\x4d\x33\x36\ +\x20\x32\x32\x4c\x32\x34\x20\x33\x34\x4c\x31\x32\x20\x32\x32\x22\ +\x20\x73\x74\x72\x6f\x6b\x65\x3d\x22\x23\x30\x30\x36\x34\x66\x66\ +\x22\x20\x73\x74\x72\x6f\x6b\x65\x2d\x77\x69\x64\x74\x68\x3d\x22\ +\x33\x22\x20\x73\x74\x72\x6f\x6b\x65\x2d\x6c\x69\x6e\x65\x63\x61\ +\x70\x3d\x22\x73\x71\x75\x61\x72\x65\x22\x20\x73\x74\x72\x6f\x6b\ +\x65\x2d\x6c\x69\x6e\x65\x6a\x6f\x69\x6e\x3d\x22\x6d\x69\x74\x65\ +\x72\x22\x2f\x3e\x3c\x70\x61\x74\x68\x20\x64\x3d\x22\x4d\x33\x36\ +\x20\x34\x32\x48\x31\x32\x22\x20\x73\x74\x72\x6f\x6b\x65\x3d\x22\ +\x23\x30\x30\x36\x34\x66\x66\x22\x20\x73\x74\x72\x6f\x6b\x65\x2d\ +\x77\x69\x64\x74\x68\x3d\x22\x33\x22\x20\x73\x74\x72\x6f\x6b\x65\ +\x2d\x6c\x69\x6e\x65\x63\x61\x70\x3d\x22\x73\x71\x75\x61\x72\x65\ +\x22\x20\x73\x74\x72\x6f\x6b\x65\x2d\x6c\x69\x6e\x65\x6a\x6f\x69\ +\x6e\x3d\x22\x6d\x69\x74\x65\x72\x22\x2f\x3e\x3c\x2f\x73\x76\x67\ +\x3e\ +\x00\x00\x02\x71\ +\x3c\ +\x3f\x78\x6d\x6c\x20\x76\x65\x72\x73\x69\x6f\x6e\x3d\x22\x31\x2e\ +\x30\x22\x20\x65\x6e\x63\x6f\x64\x69\x6e\x67\x3d\x22\x55\x54\x46\ +\x2d\x38\x22\x3f\x3e\x3c\x73\x76\x67\x20\x77\x69\x64\x74\x68\x3d\ +\x22\x33\x32\x22\x20\x68\x65\x69\x67\x68\x74\x3d\x22\x33\x32\x22\ +\x20\x76\x69\x65\x77\x42\x6f\x78\x3d\x22\x30\x20\x30\x20\x34\x38\ +\x20\x34\x38\x22\x20\x66\x69\x6c\x6c\x3d\x22\x6e\x6f\x6e\x65\x22\ 
+\x20\x78\x6d\x6c\x6e\x73\x3d\x22\x68\x74\x74\x70\x3a\x2f\x2f\x77\ +\x77\x77\x2e\x77\x33\x2e\x6f\x72\x67\x2f\x32\x30\x30\x30\x2f\x73\ +\x76\x67\x22\x3e\x3c\x70\x61\x74\x68\x20\x64\x3d\x22\x4d\x33\x39\ +\x2e\x33\x20\x36\x48\x38\x2e\x37\x43\x37\x2e\x32\x30\x38\x38\x33\ +\x20\x36\x20\x36\x20\x37\x2e\x32\x30\x38\x38\x33\x20\x36\x20\x38\ +\x2e\x37\x56\x33\x39\x2e\x33\x43\x36\x20\x34\x30\x2e\x37\x39\x31\ +\x32\x20\x37\x2e\x32\x30\x38\x38\x33\x20\x34\x32\x20\x38\x2e\x37\ +\x20\x34\x32\x48\x33\x39\x2e\x33\x43\x34\x30\x2e\x37\x39\x31\x32\ +\x20\x34\x32\x20\x34\x32\x20\x34\x30\x2e\x37\x39\x31\x32\x20\x34\ +\x32\x20\x33\x39\x2e\x33\x56\x38\x2e\x37\x43\x34\x32\x20\x37\x2e\ +\x32\x30\x38\x38\x33\x20\x34\x30\x2e\x37\x39\x31\x32\x20\x36\x20\ +\x33\x39\x2e\x33\x20\x36\x5a\x22\x20\x66\x69\x6c\x6c\x3d\x22\x6e\ +\x6f\x6e\x65\x22\x20\x73\x74\x72\x6f\x6b\x65\x3d\x22\x23\x30\x30\ +\x36\x34\x66\x66\x22\x20\x73\x74\x72\x6f\x6b\x65\x2d\x77\x69\x64\ +\x74\x68\x3d\x22\x33\x22\x20\x73\x74\x72\x6f\x6b\x65\x2d\x6c\x69\ +\x6e\x65\x6a\x6f\x69\x6e\x3d\x22\x6d\x69\x74\x65\x72\x22\x2f\x3e\ +\x3c\x70\x61\x74\x68\x20\x64\x3d\x22\x4d\x33\x32\x20\x36\x56\x32\ +\x34\x48\x31\x35\x56\x36\x48\x33\x32\x5a\x22\x20\x66\x69\x6c\x6c\ +\x3d\x22\x6e\x6f\x6e\x65\x22\x20\x73\x74\x72\x6f\x6b\x65\x3d\x22\ +\x23\x30\x30\x36\x34\x66\x66\x22\x20\x73\x74\x72\x6f\x6b\x65\x2d\ +\x77\x69\x64\x74\x68\x3d\x22\x33\x22\x20\x73\x74\x72\x6f\x6b\x65\ +\x2d\x6c\x69\x6e\x65\x6a\x6f\x69\x6e\x3d\x22\x6d\x69\x74\x65\x72\ +\x22\x2f\x3e\x3c\x70\x61\x74\x68\x20\x64\x3d\x22\x4d\x32\x36\x20\ +\x31\x33\x56\x31\x37\x22\x20\x73\x74\x72\x6f\x6b\x65\x3d\x22\x23\ +\x30\x30\x36\x34\x66\x66\x22\x20\x73\x74\x72\x6f\x6b\x65\x2d\x77\ +\x69\x64\x74\x68\x3d\x22\x33\x22\x20\x73\x74\x72\x6f\x6b\x65\x2d\ +\x6c\x69\x6e\x65\x63\x61\x70\x3d\x22\x73\x71\x75\x61\x72\x65\x22\ +\x2f\x3e\x3c\x70\x61\x74\x68\x20\x64\x3d\x22\x4d\x31\x30\x2e\x39\ +\x39\x37\x31\x20\x36\x48\x33\x35\x2e\x39\x39\x38\x36\x22\x20\x73\ +\x74\x72\x6f\x6b\x65\x3d\x22\x23\x30\x30\x36\x34\x66\x66\x22\x20\ +\x73\x74\x72\x6f\x6b\x65\x2d\x77\x69\x64\x74\x68\x3d\x22\x33\x22\ +\x20\x73\x74\x72\x6f\x6b\x65\x2d\x6c\x69\x6e\x65\x63\x61\x70\x3d\ +\x22\x73\x71\x75\x61\x72\x65\x22\x2f\x3e\x3c\x2f\x73\x76\x67\x3e\ +\ +\x00\x00\x01\xe3\ +\x3c\ +\x3f\x78\x6d\x6c\x20\x76\x65\x72\x73\x69\x6f\x6e\x3d\x22\x31\x2e\ +\x30\x22\x20\x65\x6e\x63\x6f\x64\x69\x6e\x67\x3d\x22\x55\x54\x46\ +\x2d\x38\x22\x3f\x3e\x3c\x73\x76\x67\x20\x77\x69\x64\x74\x68\x3d\ +\x22\x33\x32\x22\x20\x68\x65\x69\x67\x68\x74\x3d\x22\x33\x32\x22\ +\x20\x76\x69\x65\x77\x42\x6f\x78\x3d\x22\x30\x20\x30\x20\x34\x38\ +\x20\x34\x38\x22\x20\x66\x69\x6c\x6c\x3d\x22\x6e\x6f\x6e\x65\x22\ +\x20\x78\x6d\x6c\x6e\x73\x3d\x22\x68\x74\x74\x70\x3a\x2f\x2f\x77\ +\x77\x77\x2e\x77\x33\x2e\x6f\x72\x67\x2f\x32\x30\x30\x30\x2f\x73\ +\x76\x67\x22\x3e\x3c\x70\x61\x74\x68\x20\x66\x69\x6c\x6c\x2d\x72\ +\x75\x6c\x65\x3d\x22\x65\x76\x65\x6e\x6f\x64\x64\x22\x20\x63\x6c\ +\x69\x70\x2d\x72\x75\x6c\x65\x3d\x22\x65\x76\x65\x6e\x6f\x64\x64\ +\x22\x20\x64\x3d\x22\x4d\x34\x20\x34\x30\x2e\x38\x33\x36\x31\x43\ +\x38\x2e\x38\x39\x33\x30\x37\x20\x33\x34\x2e\x38\x36\x33\x32\x20\ +\x31\x33\x2e\x32\x33\x38\x33\x20\x33\x31\x2e\x34\x37\x33\x39\x20\ +\x31\x37\x2e\x30\x33\x35\x36\x20\x33\x30\x2e\x36\x36\x38\x32\x43\ +\x32\x30\x2e\x38\x33\x32\x39\x20\x32\x39\x2e\x38\x36\x32\x35\x20\ +\x32\x34\x2e\x34\x34\x38\x33\x20\x32\x39\x2e\x37\x34\x30\x38\x20\ +\x32\x37\x2e\x38\x38\x31\x38\x20\x33\x30\x2e\x33\x30\x33\x56\x34\ +\x31\x4c\x34\x34\x20\x32\x33\x2e\x35\x34\x35\x33\x4c\x32\x37\x2e\ 
+\x38\x38\x31\x38\x20\x37\x56\x31\x37\x2e\x31\x36\x37\x43\x32\x31\ +\x2e\x35\x33\x33\x33\x20\x31\x37\x2e\x32\x31\x37\x32\x20\x31\x36\ +\x2e\x31\x33\x36\x32\x20\x31\x39\x2e\x34\x39\x34\x38\x20\x31\x31\ +\x2e\x36\x39\x30\x35\x20\x32\x34\x43\x37\x2e\x32\x34\x34\x37\x34\ +\x20\x32\x38\x2e\x35\x30\x35\x32\x20\x34\x2e\x36\x38\x31\x32\x36\ +\x20\x33\x34\x2e\x31\x31\x37\x32\x20\x34\x20\x34\x30\x2e\x38\x33\ +\x36\x31\x5a\x22\x20\x66\x69\x6c\x6c\x3d\x22\x6e\x6f\x6e\x65\x22\ +\x20\x73\x74\x72\x6f\x6b\x65\x3d\x22\x23\x30\x30\x36\x34\x66\x66\ +\x22\x20\x73\x74\x72\x6f\x6b\x65\x2d\x77\x69\x64\x74\x68\x3d\x22\ +\x33\x22\x20\x73\x74\x72\x6f\x6b\x65\x2d\x6c\x69\x6e\x65\x6a\x6f\ +\x69\x6e\x3d\x22\x6d\x69\x74\x65\x72\x22\x2f\x3e\x3c\x2f\x73\x76\ +\x67\x3e\ +\x00\x00\x04\x7e\ +\x00\ +\x00\x01\x00\x01\x00\x10\x10\x00\x00\x01\x00\x20\x00\x68\x04\x00\ +\x00\x16\x00\x00\x00\x28\x00\x00\x00\x10\x00\x00\x00\x20\x00\x00\ +\x00\x01\x00\x20\x00\x00\x00\x00\x00\x00\x04\x00\x00\x12\x0b\x00\ +\x00\x12\x0b\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x64\x61\x5e\ +\xff\x60\x5a\x5f\xff\x64\x61\x5e\xff\x5f\x5f\x5e\xff\x58\x59\x5f\ +\xff\x5a\x5b\x5f\xff\x5b\x5b\x5f\xff\x5b\x5b\x5f\xff\x5b\x5b\x5f\ +\xff\x5b\x5b\x5f\xff\x5b\x5b\x5f\xff\x5b\x5b\x5f\xff\x5b\x5b\x5f\ +\xff\x5b\x5b\x5f\xff\x62\x60\x5e\xff\x64\x61\x5e\xff\x1b\x17\x13\ +\xff\x29\x33\x0d\xff\x19\x16\x12\xff\x32\x20\x11\xff\x50\x3c\x0d\ +\xff\x45\x33\x0e\xff\x42\x31\x0e\xff\x44\x32\x0e\xff\x44\x32\x0e\ +\xff\x44\x32\x0e\xff\x44\x32\x0e\xff\x44\x32\x0e\xff\x44\x33\x0e\ +\xff\x41\x31\x0e\xff\x20\x1a\x12\xff\x1a\x16\x13\xff\x25\x20\x1f\ +\xff\x66\xa0\x08\xff\x65\xa2\x08\xff\x4e\x57\x15\xff\x78\x54\x17\ +\xff\xb7\x83\x11\xff\xc2\x8d\x0f\xff\xb9\x86\x11\xff\xb9\x86\x11\ +\xff\xb8\x86\x11\xff\xba\x87\x10\xff\xba\x87\x10\xff\xb8\x86\x11\ +\xff\xc3\x8d\x0f\xff\x66\x4e\x18\xff\x1d\x1c\x20\xff\x24\x1e\x1d\ +\xff\x60\x97\x07\xff\x77\xc5\x00\xff\x6e\xb9\x01\xff\x44\x70\x0d\ +\xff\x3a\x32\x19\xff\x88\x64\x13\xff\xb4\x84\x11\xff\xb6\x85\x11\ +\xff\xb8\x85\x0f\xff\xb0\x7f\x0f\xff\xaf\x7f\x0f\xff\xad\x7e\x0f\ +\xff\xbb\x86\x0e\xff\x6f\x53\x15\xff\x1a\x1a\x1e\xff\x24\x1e\x1d\ +\xff\x60\x98\x07\xff\x73\xbc\x01\xff\x6f\xb3\x03\xff\x7c\xcc\x00\ +\xff\x50\x81\x0d\xff\x1e\x15\x1a\xff\x46\x32\x0c\xff\x52\x39\x0b\ +\xff\x76\x57\x12\xff\xb0\x80\x10\xff\xb3\x82\x0e\xff\xae\x7e\x0f\ +\xff\xbc\x87\x0e\xff\x6d\x52\x15\xff\x1a\x1a\x1e\xff\x24\x1e\x1d\ +\xff\x60\x98\x07\xff\x73\xbd\x01\xff\x71\xb9\x01\xff\x65\xa4\x06\ +\xff\x32\x35\x10\xff\x21\x24\x2d\xff\x11\x3a\x6a\xff\x11\x36\x62\ +\xff\x17\x1a\x23\xff\x49\x37\x13\xff\xad\x7e\x10\xff\xb0\x7f\x0f\ +\xff\xbb\x87\x0e\xff\x6e\x52\x15\xff\x1a\x1a\x1e\xff\x24\x1e\x1d\ +\xff\x60\x98\x07\xff\x73\xbd\x01\xff\x71\xbc\x03\xff\x3a\x48\x0d\ +\xff\x17\x20\x40\xff\x06\x68\xd2\xff\x02\x72\xee\xff\x02\x73\xef\ +\xff\x0d\x60\xbc\xff\x14\x1c\x2a\xff\x67\x4c\x12\xff\xb7\x85\x0f\ +\xff\xba\x86\x0e\xff\x6e\x52\x15\xff\x1a\x1a\x1e\xff\x24\x1e\x1d\ +\xff\x60\x97\x07\xff\x76\xc2\x00\xff\x65\xa3\x07\xff\x28\x20\x11\ +\xff\x0c\x54\xa5\xff\x00\x72\xf1\xff\x04\x6a\xdb\xff\x04\x6a\xdb\ +\xff\x01\x74\xf4\xff\x0f\x44\x80\xff\x3a\x28\x0d\xff\xac\x7e\x12\ +\xff\xbc\x88\x0e\xff\x6d\x52\x15\xff\x1a\x1a\x1e\xff\x24\x1e\x1d\ +\xff\x60\x97\x07\xff\x77\xc4\x00\xff\x61\x9a\x07\xff\x24\x1e\x18\ +\xff\x09\x5e\xbc\xff\x02\x6e\xe6\xff\x03\x6b\xde\xff\x04\x6a\xdc\ +\xff\x02\x71\xeb\xff\x0c\x4d\x99\xff\x35\x25\x0e\xff\xa9\x7c\x12\ +\xff\xbd\x88\x0e\xff\x6d\x52\x15\xff\x1a\x1a\x1e\xff\x24\x1e\x1d\ +\xff\x60\x98\x07\xff\x75\xc0\x01\xff\x6a\xae\x06\xff\x2c\x27\x0e\ 
+\xff\x11\x45\x83\xff\x00\x76\xf7\xff\x03\x6d\xe2\xff\x02\x6e\xe5\ +\xff\x00\x73\xf2\xff\x19\x3b\x5d\xff\x28\x1e\x11\xff\x94\x6d\x14\ +\xff\xc1\x8b\x0d\xff\x6c\x51\x15\xff\x1a\x1a\x1e\xff\x24\x1e\x1d\ +\xff\x60\x98\x07\xff\x73\xbc\x01\xff\x74\xbf\x01\xff\x4b\x6d\x0d\ +\xff\x1c\x12\x21\xff\x10\x4d\x91\xff\x06\x69\xd3\xff\x07\x67\xcf\ +\xff\x13\x40\x77\xff\x23\x16\x18\xff\x1b\x19\x1f\xff\x4d\x3d\x18\ +\xff\xc2\x8b\x0d\xff\x6c\x51\x15\xff\x1a\x1a\x1e\xff\x24\x1e\x1d\ +\xff\x60\x98\x07\xff\x73\xbe\x01\xff\x6f\xb6\x02\xff\x71\xba\x02\ +\xff\x48\x66\x0d\xff\x27\x1e\x12\xff\x1e\x1d\x29\xff\x1f\x1b\x25\ +\xff\x2a\x25\x10\xff\x50\x78\x0c\xff\x35\x40\x17\xff\x1b\x16\x1e\ +\xff\x9c\x72\x10\xff\x74\x56\x15\xff\x19\x19\x1e\xff\x24\x1e\x1d\ +\xff\x60\x97\x07\xff\x73\xbd\x01\xff\x6f\xb5\x02\xff\x70\xb6\x02\ +\xff\x73\xbf\x01\xff\x65\xa3\x07\xff\x57\x83\x08\xff\x58\x86\x08\ +\xff\x68\xa9\x07\xff\x79\xca\x00\xff\x53\x7c\x0c\xff\x15\x0f\x1f\ +\xff\x5f\x49\x16\xff\x76\x58\x14\xff\x18\x18\x1e\xff\x24\x1f\x1d\ +\xff\x63\x9e\x06\xff\x75\xc1\x00\xff\x71\xb9\x02\xff\x72\xba\x01\ +\xff\x71\xb9\x02\xff\x74\xbf\x01\xff\x76\xc4\x01\xff\x76\xc4\x01\ +\xff\x73\xbc\x01\xff\x72\xbc\x01\xff\x6a\xab\x04\xff\x25\x26\x1b\ +\xff\x31\x28\x1b\xff\x59\x45\x17\xff\x1f\x1d\x1e\xff\x22\x1a\x1e\ +\xff\x4f\x75\x0d\xff\x6d\xb2\x03\xff\x69\xa9\x04\xff\x69\xaa\x04\ +\xff\x69\xaa\x04\xff\x69\xa9\x04\xff\x68\xa8\x04\xff\x68\xa8\x04\ +\xff\x69\xa9\x04\xff\x69\xaa\x04\xff\x6b\xae\x03\xff\x34\x40\x16\ +\xff\x21\x1a\x1e\xff\x27\x23\x1c\xff\x25\x21\x1d\xff\x24\x20\x1d\ +\xff\x24\x20\x1d\xff\x2a\x2c\x1b\xff\x2a\x2c\x1b\xff\x2a\x2c\x1b\ +\xff\x2a\x2c\x1b\xff\x2a\x2c\x1b\xff\x2a\x2c\x1b\xff\x2a\x2c\x1b\ +\xff\x2a\x2c\x1b\xff\x2a\x2c\x1b\xff\x2b\x2c\x1b\xff\x27\x24\x1c\ +\xff\x25\x21\x1d\xff\x25\x21\x1d\xff\x25\x21\x1d\xff\x00\x00\x00\ +\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\ +\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\ +\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\ +\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\ +\x00\x00\x05\xa3\ +\x3c\ +\x3f\x78\x6d\x6c\x20\x76\x65\x72\x73\x69\x6f\x6e\x3d\x22\x31\x2e\ +\x30\x22\x20\x65\x6e\x63\x6f\x64\x69\x6e\x67\x3d\x22\x55\x54\x46\ +\x2d\x38\x22\x3f\x3e\x3c\x73\x76\x67\x20\x77\x69\x64\x74\x68\x3d\ +\x22\x33\x32\x22\x20\x68\x65\x69\x67\x68\x74\x3d\x22\x33\x32\x22\ +\x20\x76\x69\x65\x77\x42\x6f\x78\x3d\x22\x30\x20\x30\x20\x34\x38\ +\x20\x34\x38\x22\x20\x66\x69\x6c\x6c\x3d\x22\x6e\x6f\x6e\x65\x22\ +\x20\x78\x6d\x6c\x6e\x73\x3d\x22\x68\x74\x74\x70\x3a\x2f\x2f\x77\ +\x77\x77\x2e\x77\x33\x2e\x6f\x72\x67\x2f\x32\x30\x30\x30\x2f\x73\ +\x76\x67\x22\x3e\x3c\x70\x61\x74\x68\x20\x64\x3d\x22\x4d\x31\x38\ +\x2e\x32\x38\x33\x38\x20\x34\x33\x2e\x31\x37\x31\x33\x43\x31\x34\ +\x2e\x39\x33\x32\x37\x20\x34\x32\x2e\x31\x37\x33\x36\x20\x31\x31\ +\x2e\x39\x34\x39\x38\x20\x34\x30\x2e\x33\x32\x31\x33\x20\x39\x2e\ +\x35\x38\x37\x38\x37\x20\x33\x37\x2e\x38\x36\x37\x43\x31\x30\x2e\ +\x34\x36\x39\x20\x33\x36\x2e\x38\x32\x32\x37\x20\x31\x31\x20\x33\ +\x35\x2e\x34\x37\x33\x34\x20\x31\x31\x20\x33\x34\x2e\x30\x30\x30\ +\x31\x43\x31\x31\x20\x33\x30\x2e\x36\x38\x36\x34\x20\x38\x2e\x33\ +\x31\x33\x37\x31\x20\x32\x38\x2e\x30\x30\x30\x31\x20\x35\x20\x32\ +\x38\x2e\x30\x30\x30\x31\x43\x34\x2e\x37\x39\x39\x35\x35\x20\x32\ +\x38\x2e\x30\x30\x30\x31\x20\x34\x2e\x36\x30\x31\x33\x39\x20\x32\ +\x38\x2e\x30\x31\x20\x34\x2e\x34\x30\x35\x39\x39\x20\x32\x38\x2e\ +\x30\x32\x39\x32\x43\x34\x2e\x31\x33\x39\x37\x39\x20\x32\x36\x2e\ 
+\x37\x32\x37\x37\x20\x34\x20\x32\x35\x2e\x33\x38\x30\x33\x20\x34\ +\x20\x32\x34\x2e\x30\x30\x30\x31\x43\x34\x20\x32\x31\x2e\x39\x30\ +\x39\x35\x20\x34\x2e\x33\x32\x30\x37\x37\x20\x31\x39\x2e\x38\x39\ +\x33\x38\x20\x34\x2e\x39\x31\x35\x37\x39\x20\x31\x37\x2e\x39\x39\ +\x39\x35\x43\x34\x2e\x39\x34\x33\x38\x31\x20\x31\x37\x2e\x39\x39\ +\x39\x39\x20\x34\x2e\x39\x37\x31\x38\x38\x20\x31\x38\x2e\x30\x30\ +\x30\x31\x20\x35\x20\x31\x38\x2e\x30\x30\x30\x31\x43\x38\x2e\x33\ +\x31\x33\x37\x31\x20\x31\x38\x2e\x30\x30\x30\x31\x20\x31\x31\x20\ +\x31\x35\x2e\x33\x31\x33\x38\x20\x31\x31\x20\x31\x32\x2e\x30\x30\ +\x30\x31\x43\x31\x31\x20\x31\x31\x2e\x30\x34\x38\x38\x20\x31\x30\ +\x2e\x37\x37\x38\x36\x20\x31\x30\x2e\x31\x34\x39\x33\x20\x31\x30\ +\x2e\x33\x38\x34\x36\x20\x39\x2e\x33\x35\x30\x31\x31\x43\x31\x32\ +\x2e\x36\x39\x37\x35\x20\x37\x2e\x31\x39\x39\x35\x20\x31\x35\x2e\ +\x35\x32\x30\x35\x20\x35\x2e\x35\x39\x30\x30\x32\x20\x31\x38\x2e\ +\x36\x35\x32\x31\x20\x34\x2e\x37\x32\x33\x31\x34\x43\x31\x39\x2e\ +\x36\x34\x34\x34\x20\x36\x2e\x36\x36\x38\x31\x39\x20\x32\x31\x2e\ +\x36\x36\x36\x37\x20\x38\x2e\x30\x30\x30\x31\x33\x20\x32\x34\x20\ +\x38\x2e\x30\x30\x30\x31\x33\x43\x32\x36\x2e\x33\x33\x33\x33\x20\ +\x38\x2e\x30\x30\x30\x31\x33\x20\x32\x38\x2e\x33\x35\x35\x36\x20\ +\x36\x2e\x36\x36\x38\x31\x39\x20\x32\x39\x2e\x33\x34\x37\x39\x20\ +\x34\x2e\x37\x32\x33\x31\x34\x43\x33\x32\x2e\x34\x37\x39\x35\x20\ +\x35\x2e\x35\x39\x30\x30\x32\x20\x33\x35\x2e\x33\x30\x32\x35\x20\ +\x37\x2e\x31\x39\x39\x35\x20\x33\x37\x2e\x36\x31\x35\x34\x20\x39\ +\x2e\x33\x35\x30\x31\x31\x43\x33\x37\x2e\x32\x32\x31\x34\x20\x31\ +\x30\x2e\x31\x34\x39\x33\x20\x33\x37\x20\x31\x31\x2e\x30\x34\x38\ +\x38\x20\x33\x37\x20\x31\x32\x2e\x30\x30\x30\x31\x43\x33\x37\x20\ +\x31\x35\x2e\x33\x31\x33\x38\x20\x33\x39\x2e\x36\x38\x36\x33\x20\ +\x31\x38\x2e\x30\x30\x30\x31\x20\x34\x33\x20\x31\x38\x2e\x30\x30\ +\x30\x31\x43\x34\x33\x2e\x30\x32\x38\x31\x20\x31\x38\x2e\x30\x30\ +\x30\x31\x20\x34\x33\x2e\x30\x35\x36\x32\x20\x31\x37\x2e\x39\x39\ +\x39\x39\x20\x34\x33\x2e\x30\x38\x34\x32\x20\x31\x37\x2e\x39\x39\ +\x39\x35\x43\x34\x33\x2e\x36\x37\x39\x32\x20\x31\x39\x2e\x38\x39\ +\x33\x38\x20\x34\x34\x20\x32\x31\x2e\x39\x30\x39\x35\x20\x34\x34\ +\x20\x32\x34\x2e\x30\x30\x30\x31\x43\x34\x34\x20\x32\x35\x2e\x33\ +\x38\x30\x33\x20\x34\x33\x2e\x38\x36\x30\x32\x20\x32\x36\x2e\x37\ +\x32\x37\x37\x20\x34\x33\x2e\x35\x39\x34\x20\x32\x38\x2e\x30\x32\ +\x39\x32\x43\x34\x33\x2e\x33\x39\x38\x36\x20\x32\x38\x2e\x30\x31\ +\x20\x34\x33\x2e\x32\x30\x30\x35\x20\x32\x38\x2e\x30\x30\x30\x31\ +\x20\x34\x33\x20\x32\x38\x2e\x30\x30\x30\x31\x43\x33\x39\x2e\x36\ +\x38\x36\x33\x20\x32\x38\x2e\x30\x30\x30\x31\x20\x33\x37\x20\x33\ +\x30\x2e\x36\x38\x36\x34\x20\x33\x37\x20\x33\x34\x2e\x30\x30\x30\ +\x31\x43\x33\x37\x20\x33\x35\x2e\x34\x37\x33\x34\x20\x33\x37\x2e\ +\x35\x33\x31\x20\x33\x36\x2e\x38\x32\x32\x37\x20\x33\x38\x2e\x34\ +\x31\x32\x31\x20\x33\x37\x2e\x38\x36\x37\x43\x33\x36\x2e\x30\x35\ +\x30\x32\x20\x34\x30\x2e\x33\x32\x31\x33\x20\x33\x33\x2e\x30\x36\ +\x37\x33\x20\x34\x32\x2e\x31\x37\x33\x36\x20\x32\x39\x2e\x37\x31\ +\x36\x32\x20\x34\x33\x2e\x31\x37\x31\x33\x43\x32\x38\x2e\x39\x34\ +\x32\x38\x20\x34\x30\x2e\x37\x35\x32\x20\x32\x36\x2e\x36\x37\x36\ +\x20\x33\x39\x2e\x30\x30\x30\x31\x20\x32\x34\x20\x33\x39\x2e\x30\ +\x30\x30\x31\x43\x32\x31\x2e\x33\x32\x34\x20\x33\x39\x2e\x30\x30\ +\x30\x31\x20\x31\x39\x2e\x30\x35\x37\x32\x20\x34\x30\x2e\x37\x35\ +\x32\x20\x31\x38\x2e\x32\x38\x33\x38\x20\x34\x33\x2e\x31\x37\x31\ +\x33\x5a\x22\x20\x66\x69\x6c\x6c\x3d\x22\x6e\x6f\x6e\x65\x22\x20\ 
+\x73\x74\x72\x6f\x6b\x65\x3d\x22\x23\x30\x30\x36\x34\x66\x66\x22\ +\x20\x73\x74\x72\x6f\x6b\x65\x2d\x77\x69\x64\x74\x68\x3d\x22\x33\ +\x22\x20\x73\x74\x72\x6f\x6b\x65\x2d\x6c\x69\x6e\x65\x6a\x6f\x69\ +\x6e\x3d\x22\x6d\x69\x74\x65\x72\x22\x2f\x3e\x3c\x70\x61\x74\x68\ +\x20\x64\x3d\x22\x4d\x32\x34\x20\x33\x31\x43\x32\x37\x2e\x38\x36\ +\x36\x20\x33\x31\x20\x33\x31\x20\x32\x37\x2e\x38\x36\x36\x20\x33\ +\x31\x20\x32\x34\x43\x33\x31\x20\x32\x30\x2e\x31\x33\x34\x20\x32\ +\x37\x2e\x38\x36\x36\x20\x31\x37\x20\x32\x34\x20\x31\x37\x43\x32\ +\x30\x2e\x31\x33\x34\x20\x31\x37\x20\x31\x37\x20\x32\x30\x2e\x31\ +\x33\x34\x20\x31\x37\x20\x32\x34\x43\x31\x37\x20\x32\x37\x2e\x38\ +\x36\x36\x20\x32\x30\x2e\x31\x33\x34\x20\x33\x31\x20\x32\x34\x20\ +\x33\x31\x5a\x22\x20\x66\x69\x6c\x6c\x3d\x22\x6e\x6f\x6e\x65\x22\ +\x20\x73\x74\x72\x6f\x6b\x65\x3d\x22\x23\x30\x30\x36\x34\x66\x66\ +\x22\x20\x73\x74\x72\x6f\x6b\x65\x2d\x77\x69\x64\x74\x68\x3d\x22\ +\x33\x22\x20\x73\x74\x72\x6f\x6b\x65\x2d\x6c\x69\x6e\x65\x6a\x6f\ +\x69\x6e\x3d\x22\x6d\x69\x74\x65\x72\x22\x2f\x3e\x3c\x2f\x73\x76\ +\x67\x3e\ +\x00\x00\x03\x3e\ +\x3c\ +\x3f\x78\x6d\x6c\x20\x76\x65\x72\x73\x69\x6f\x6e\x3d\x22\x31\x2e\ +\x30\x22\x20\x65\x6e\x63\x6f\x64\x69\x6e\x67\x3d\x22\x55\x54\x46\ +\x2d\x38\x22\x3f\x3e\x3c\x73\x76\x67\x20\x77\x69\x64\x74\x68\x3d\ +\x22\x33\x32\x22\x20\x68\x65\x69\x67\x68\x74\x3d\x22\x33\x32\x22\ +\x20\x76\x69\x65\x77\x42\x6f\x78\x3d\x22\x30\x20\x30\x20\x34\x38\ +\x20\x34\x38\x22\x20\x66\x69\x6c\x6c\x3d\x22\x6e\x6f\x6e\x65\x22\ +\x20\x78\x6d\x6c\x6e\x73\x3d\x22\x68\x74\x74\x70\x3a\x2f\x2f\x77\ +\x77\x77\x2e\x77\x33\x2e\x6f\x72\x67\x2f\x32\x30\x30\x30\x2f\x73\ +\x76\x67\x22\x3e\x3c\x70\x61\x74\x68\x20\x64\x3d\x22\x4d\x34\x30\ +\x20\x32\x33\x56\x31\x34\x4c\x33\x31\x20\x34\x48\x31\x30\x43\x38\ +\x2e\x38\x39\x35\x34\x33\x20\x34\x20\x38\x20\x34\x2e\x38\x39\x35\ +\x34\x33\x20\x38\x20\x36\x56\x34\x32\x43\x38\x20\x34\x33\x2e\x31\ +\x30\x34\x36\x20\x38\x2e\x38\x39\x35\x34\x33\x20\x34\x34\x20\x31\ +\x30\x20\x34\x34\x48\x32\x32\x22\x20\x73\x74\x72\x6f\x6b\x65\x3d\ +\x22\x23\x30\x30\x36\x34\x66\x66\x22\x20\x73\x74\x72\x6f\x6b\x65\ +\x2d\x77\x69\x64\x74\x68\x3d\x22\x33\x22\x20\x73\x74\x72\x6f\x6b\ +\x65\x2d\x6c\x69\x6e\x65\x63\x61\x70\x3d\x22\x73\x71\x75\x61\x72\ +\x65\x22\x20\x73\x74\x72\x6f\x6b\x65\x2d\x6c\x69\x6e\x65\x6a\x6f\ +\x69\x6e\x3d\x22\x6d\x69\x74\x65\x72\x22\x2f\x3e\x3c\x70\x61\x74\ +\x68\x20\x64\x3d\x22\x4d\x32\x37\x20\x33\x33\x48\x34\x31\x22\x20\ +\x73\x74\x72\x6f\x6b\x65\x3d\x22\x23\x30\x30\x36\x34\x66\x66\x22\ +\x20\x73\x74\x72\x6f\x6b\x65\x2d\x77\x69\x64\x74\x68\x3d\x22\x33\ +\x22\x20\x73\x74\x72\x6f\x6b\x65\x2d\x6c\x69\x6e\x65\x63\x61\x70\ +\x3d\x22\x73\x71\x75\x61\x72\x65\x22\x20\x73\x74\x72\x6f\x6b\x65\ +\x2d\x6c\x69\x6e\x65\x6a\x6f\x69\x6e\x3d\x22\x6d\x69\x74\x65\x72\ +\x22\x2f\x3e\x3c\x70\x61\x74\x68\x20\x64\x3d\x22\x4d\x32\x37\x20\ +\x33\x39\x48\x34\x31\x22\x20\x73\x74\x72\x6f\x6b\x65\x3d\x22\x23\ +\x30\x30\x36\x34\x66\x66\x22\x20\x73\x74\x72\x6f\x6b\x65\x2d\x77\ +\x69\x64\x74\x68\x3d\x22\x33\x22\x20\x73\x74\x72\x6f\x6b\x65\x2d\ +\x6c\x69\x6e\x65\x63\x61\x70\x3d\x22\x73\x71\x75\x61\x72\x65\x22\ +\x20\x73\x74\x72\x6f\x6b\x65\x2d\x6c\x69\x6e\x65\x6a\x6f\x69\x6e\ +\x3d\x22\x6d\x69\x74\x65\x72\x22\x2f\x3e\x3c\x70\x61\x74\x68\x20\ +\x64\x3d\x22\x4d\x34\x31\x20\x33\x33\x4c\x33\x36\x20\x32\x38\x22\ +\x20\x73\x74\x72\x6f\x6b\x65\x3d\x22\x23\x30\x30\x36\x34\x66\x66\ +\x22\x20\x73\x74\x72\x6f\x6b\x65\x2d\x77\x69\x64\x74\x68\x3d\x22\ +\x33\x22\x20\x73\x74\x72\x6f\x6b\x65\x2d\x6c\x69\x6e\x65\x63\x61\ 
+\x70\x3d\x22\x73\x71\x75\x61\x72\x65\x22\x20\x73\x74\x72\x6f\x6b\ +\x65\x2d\x6c\x69\x6e\x65\x6a\x6f\x69\x6e\x3d\x22\x6d\x69\x74\x65\ +\x72\x22\x2f\x3e\x3c\x70\x61\x74\x68\x20\x64\x3d\x22\x4d\x33\x32\ +\x20\x34\x34\x4c\x32\x37\x20\x33\x39\x22\x20\x73\x74\x72\x6f\x6b\ +\x65\x3d\x22\x23\x30\x30\x36\x34\x66\x66\x22\x20\x73\x74\x72\x6f\ +\x6b\x65\x2d\x77\x69\x64\x74\x68\x3d\x22\x33\x22\x20\x73\x74\x72\ +\x6f\x6b\x65\x2d\x6c\x69\x6e\x65\x63\x61\x70\x3d\x22\x73\x71\x75\ +\x61\x72\x65\x22\x20\x73\x74\x72\x6f\x6b\x65\x2d\x6c\x69\x6e\x65\ +\x6a\x6f\x69\x6e\x3d\x22\x6d\x69\x74\x65\x72\x22\x2f\x3e\x3c\x70\ +\x61\x74\x68\x20\x64\x3d\x22\x4d\x33\x30\x20\x34\x56\x31\x34\x48\ +\x34\x30\x22\x20\x73\x74\x72\x6f\x6b\x65\x3d\x22\x23\x30\x30\x36\ +\x34\x66\x66\x22\x20\x73\x74\x72\x6f\x6b\x65\x2d\x77\x69\x64\x74\ +\x68\x3d\x22\x33\x22\x20\x73\x74\x72\x6f\x6b\x65\x2d\x6c\x69\x6e\ +\x65\x63\x61\x70\x3d\x22\x73\x71\x75\x61\x72\x65\x22\x20\x73\x74\ +\x72\x6f\x6b\x65\x2d\x6c\x69\x6e\x65\x6a\x6f\x69\x6e\x3d\x22\x6d\ +\x69\x74\x65\x72\x22\x2f\x3e\x3c\x2f\x73\x76\x67\x3e\ +\x00\x00\x02\x8d\ +\x3c\ +\x3f\x78\x6d\x6c\x20\x76\x65\x72\x73\x69\x6f\x6e\x3d\x22\x31\x2e\ +\x30\x22\x20\x65\x6e\x63\x6f\x64\x69\x6e\x67\x3d\x22\x55\x54\x46\ +\x2d\x38\x22\x3f\x3e\x3c\x73\x76\x67\x20\x77\x69\x64\x74\x68\x3d\ +\x22\x33\x32\x22\x20\x68\x65\x69\x67\x68\x74\x3d\x22\x33\x32\x22\ +\x20\x76\x69\x65\x77\x42\x6f\x78\x3d\x22\x30\x20\x30\x20\x34\x38\ +\x20\x34\x38\x22\x20\x66\x69\x6c\x6c\x3d\x22\x6e\x6f\x6e\x65\x22\ +\x20\x78\x6d\x6c\x6e\x73\x3d\x22\x68\x74\x74\x70\x3a\x2f\x2f\x77\ +\x77\x77\x2e\x77\x33\x2e\x6f\x72\x67\x2f\x32\x30\x30\x30\x2f\x73\ +\x76\x67\x22\x3e\x3c\x70\x61\x74\x68\x20\x64\x3d\x22\x4d\x31\x34\ +\x2e\x35\x20\x38\x43\x31\x33\x2e\x38\x34\x30\x36\x20\x38\x2e\x33\ +\x37\x36\x35\x32\x20\x31\x33\x2e\x32\x30\x36\x32\x20\x38\x2e\x37\ +\x39\x31\x30\x33\x20\x31\x32\x2e\x36\x20\x39\x2e\x32\x34\x30\x35\ +\x31\x43\x31\x31\x2e\x35\x36\x32\x35\x20\x31\x30\x2e\x30\x30\x39\ +\x37\x20\x31\x30\x2e\x36\x30\x37\x34\x20\x31\x30\x2e\x38\x38\x31\ +\x34\x20\x39\x2e\x37\x35\x20\x31\x31\x2e\x38\x34\x30\x32\x43\x36\ +\x2e\x37\x39\x33\x37\x37\x20\x31\x35\x2e\x31\x34\x36\x33\x20\x35\ +\x20\x31\x39\x2e\x34\x38\x39\x31\x20\x35\x20\x32\x34\x2e\x32\x34\ +\x35\x35\x43\x35\x20\x33\x34\x2e\x36\x30\x33\x33\x20\x31\x33\x2e\ +\x35\x30\x36\x36\x20\x34\x33\x20\x32\x34\x20\x34\x33\x43\x33\x34\ +\x2e\x34\x39\x33\x34\x20\x34\x33\x20\x34\x33\x20\x33\x34\x2e\x36\ +\x30\x33\x33\x20\x34\x33\x20\x32\x34\x2e\x32\x34\x35\x35\x43\x34\ +\x33\x20\x31\x39\x2e\x34\x38\x39\x31\x20\x34\x31\x2e\x32\x30\x36\ +\x32\x20\x31\x35\x2e\x31\x34\x36\x33\x20\x33\x38\x2e\x32\x35\x20\ +\x31\x31\x2e\x38\x34\x30\x32\x43\x33\x37\x2e\x33\x39\x32\x36\x20\ +\x31\x30\x2e\x38\x38\x31\x34\x20\x33\x36\x2e\x34\x33\x37\x35\x20\ +\x31\x30\x2e\x30\x30\x39\x37\x20\x33\x35\x2e\x34\x20\x39\x2e\x32\ +\x34\x30\x35\x31\x43\x33\x34\x2e\x37\x39\x33\x38\x20\x38\x2e\x37\ +\x39\x31\x30\x33\x20\x33\x34\x2e\x31\x35\x39\x34\x20\x38\x2e\x33\ +\x37\x36\x35\x32\x20\x33\x33\x2e\x35\x20\x38\x22\x20\x73\x74\x72\ +\x6f\x6b\x65\x3d\x22\x23\x30\x30\x36\x34\x66\x66\x22\x20\x73\x74\ +\x72\x6f\x6b\x65\x2d\x77\x69\x64\x74\x68\x3d\x22\x33\x22\x20\x73\ +\x74\x72\x6f\x6b\x65\x2d\x6c\x69\x6e\x65\x63\x61\x70\x3d\x22\x73\ +\x71\x75\x61\x72\x65\x22\x20\x73\x74\x72\x6f\x6b\x65\x2d\x6c\x69\ +\x6e\x65\x6a\x6f\x69\x6e\x3d\x22\x6d\x69\x74\x65\x72\x22\x2f\x3e\ +\x3c\x70\x61\x74\x68\x20\x64\x3d\x22\x4d\x32\x34\x20\x34\x56\x32\ +\x34\x22\x20\x73\x74\x72\x6f\x6b\x65\x3d\x22\x23\x30\x30\x36\x34\ 
+\x66\x66\x22\x20\x73\x74\x72\x6f\x6b\x65\x2d\x77\x69\x64\x74\x68\ +\x3d\x22\x33\x22\x20\x73\x74\x72\x6f\x6b\x65\x2d\x6c\x69\x6e\x65\ +\x63\x61\x70\x3d\x22\x73\x71\x75\x61\x72\x65\x22\x20\x73\x74\x72\ +\x6f\x6b\x65\x2d\x6c\x69\x6e\x65\x6a\x6f\x69\x6e\x3d\x22\x6d\x69\ +\x74\x65\x72\x22\x2f\x3e\x3c\x2f\x73\x76\x67\x3e\ +\x00\x00\x02\x35\ +\x3c\ +\x3f\x78\x6d\x6c\x20\x76\x65\x72\x73\x69\x6f\x6e\x3d\x22\x31\x2e\ +\x30\x22\x20\x65\x6e\x63\x6f\x64\x69\x6e\x67\x3d\x22\x55\x54\x46\ +\x2d\x38\x22\x3f\x3e\x3c\x73\x76\x67\x20\x77\x69\x64\x74\x68\x3d\ +\x22\x33\x32\x22\x20\x68\x65\x69\x67\x68\x74\x3d\x22\x33\x32\x22\ +\x20\x76\x69\x65\x77\x42\x6f\x78\x3d\x22\x30\x20\x30\x20\x34\x38\ +\x20\x34\x38\x22\x20\x66\x69\x6c\x6c\x3d\x22\x6e\x6f\x6e\x65\x22\ +\x20\x78\x6d\x6c\x6e\x73\x3d\x22\x68\x74\x74\x70\x3a\x2f\x2f\x77\ +\x77\x77\x2e\x77\x33\x2e\x6f\x72\x67\x2f\x32\x30\x30\x30\x2f\x73\ +\x76\x67\x22\x3e\x3c\x70\x61\x74\x68\x20\x64\x3d\x22\x4d\x32\x31\ +\x20\x33\x38\x43\x33\x30\x2e\x33\x38\x38\x38\x20\x33\x38\x20\x33\ +\x38\x20\x33\x30\x2e\x33\x38\x38\x38\x20\x33\x38\x20\x32\x31\x43\ +\x33\x38\x20\x31\x31\x2e\x36\x31\x31\x32\x20\x33\x30\x2e\x33\x38\ +\x38\x38\x20\x34\x20\x32\x31\x20\x34\x43\x31\x31\x2e\x36\x31\x31\ +\x32\x20\x34\x20\x34\x20\x31\x31\x2e\x36\x31\x31\x32\x20\x34\x20\ +\x32\x31\x43\x34\x20\x33\x30\x2e\x33\x38\x38\x38\x20\x31\x31\x2e\ +\x36\x31\x31\x32\x20\x33\x38\x20\x32\x31\x20\x33\x38\x5a\x22\x20\ +\x66\x69\x6c\x6c\x3d\x22\x6e\x6f\x6e\x65\x22\x20\x73\x74\x72\x6f\ +\x6b\x65\x3d\x22\x23\x30\x30\x36\x34\x66\x66\x22\x20\x73\x74\x72\ +\x6f\x6b\x65\x2d\x77\x69\x64\x74\x68\x3d\x22\x33\x22\x20\x73\x74\ +\x72\x6f\x6b\x65\x2d\x6c\x69\x6e\x65\x6a\x6f\x69\x6e\x3d\x22\x6d\ +\x69\x74\x65\x72\x22\x2f\x3e\x3c\x70\x61\x74\x68\x20\x64\x3d\x22\ +\x4d\x31\x35\x20\x32\x31\x4c\x32\x37\x20\x32\x31\x22\x20\x73\x74\ +\x72\x6f\x6b\x65\x3d\x22\x23\x30\x30\x36\x34\x66\x66\x22\x20\x73\ +\x74\x72\x6f\x6b\x65\x2d\x77\x69\x64\x74\x68\x3d\x22\x33\x22\x20\ +\x73\x74\x72\x6f\x6b\x65\x2d\x6c\x69\x6e\x65\x63\x61\x70\x3d\x22\ +\x73\x71\x75\x61\x72\x65\x22\x20\x73\x74\x72\x6f\x6b\x65\x2d\x6c\ +\x69\x6e\x65\x6a\x6f\x69\x6e\x3d\x22\x6d\x69\x74\x65\x72\x22\x2f\ +\x3e\x3c\x70\x61\x74\x68\x20\x64\x3d\x22\x4d\x33\x33\x2e\x32\x32\ +\x31\x36\x20\x33\x33\x2e\x32\x32\x31\x37\x4c\x34\x31\x2e\x37\x30\ +\x36\x39\x20\x34\x31\x2e\x37\x30\x37\x22\x20\x73\x74\x72\x6f\x6b\ +\x65\x3d\x22\x23\x30\x30\x36\x34\x66\x66\x22\x20\x73\x74\x72\x6f\ +\x6b\x65\x2d\x77\x69\x64\x74\x68\x3d\x22\x33\x22\x20\x73\x74\x72\ +\x6f\x6b\x65\x2d\x6c\x69\x6e\x65\x63\x61\x70\x3d\x22\x73\x71\x75\ +\x61\x72\x65\x22\x20\x73\x74\x72\x6f\x6b\x65\x2d\x6c\x69\x6e\x65\ +\x6a\x6f\x69\x6e\x3d\x22\x6d\x69\x74\x65\x72\x22\x2f\x3e\x3c\x2f\ +\x73\x76\x67\x3e\ +\x00\x00\x03\x5e\ +\x3c\ +\x3f\x78\x6d\x6c\x20\x76\x65\x72\x73\x69\x6f\x6e\x3d\x22\x31\x2e\ +\x30\x22\x20\x65\x6e\x63\x6f\x64\x69\x6e\x67\x3d\x22\x55\x54\x46\ +\x2d\x38\x22\x3f\x3e\x3c\x73\x76\x67\x20\x77\x69\x64\x74\x68\x3d\ +\x22\x33\x32\x22\x20\x68\x65\x69\x67\x68\x74\x3d\x22\x33\x32\x22\ +\x20\x76\x69\x65\x77\x42\x6f\x78\x3d\x22\x30\x20\x30\x20\x34\x38\ +\x20\x34\x38\x22\x20\x66\x69\x6c\x6c\x3d\x22\x6e\x6f\x6e\x65\x22\ +\x20\x78\x6d\x6c\x6e\x73\x3d\x22\x68\x74\x74\x70\x3a\x2f\x2f\x77\ +\x77\x77\x2e\x77\x33\x2e\x6f\x72\x67\x2f\x32\x30\x30\x30\x2f\x73\ +\x76\x67\x22\x3e\x3c\x70\x61\x74\x68\x20\x66\x69\x6c\x6c\x2d\x72\ +\x75\x6c\x65\x3d\x22\x65\x76\x65\x6e\x6f\x64\x64\x22\x20\x63\x6c\ +\x69\x70\x2d\x72\x75\x6c\x65\x3d\x22\x65\x76\x65\x6e\x6f\x64\x64\ +\x22\x20\x64\x3d\x22\x4d\x32\x34\x20\x34\x31\x43\x33\x33\x2e\x39\ 
+\x34\x31\x31\x20\x34\x31\x20\x34\x32\x20\x33\x32\x2e\x36\x37\x38\ +\x20\x34\x32\x20\x32\x37\x43\x34\x32\x20\x32\x31\x2e\x33\x32\x32\ +\x20\x33\x33\x2e\x39\x34\x31\x31\x20\x31\x33\x20\x32\x34\x20\x31\ +\x33\x43\x31\x34\x2e\x30\x35\x38\x39\x20\x31\x33\x20\x36\x20\x32\ +\x31\x2e\x33\x32\x37\x38\x20\x36\x20\x32\x37\x43\x36\x20\x33\x32\ +\x2e\x36\x37\x32\x32\x20\x31\x34\x2e\x30\x35\x38\x39\x20\x34\x31\ +\x20\x32\x34\x20\x34\x31\x5a\x22\x20\x66\x69\x6c\x6c\x3d\x22\x6e\ +\x6f\x6e\x65\x22\x20\x73\x74\x72\x6f\x6b\x65\x3d\x22\x23\x30\x30\ +\x36\x34\x66\x66\x22\x20\x73\x74\x72\x6f\x6b\x65\x2d\x77\x69\x64\ +\x74\x68\x3d\x22\x33\x22\x20\x73\x74\x72\x6f\x6b\x65\x2d\x6c\x69\ +\x6e\x65\x6a\x6f\x69\x6e\x3d\x22\x6d\x69\x74\x65\x72\x22\x2f\x3e\ +\x3c\x70\x61\x74\x68\x20\x64\x3d\x22\x4d\x32\x34\x20\x33\x33\x43\ +\x32\x37\x2e\x33\x31\x33\x37\x20\x33\x33\x20\x33\x30\x20\x33\x30\ +\x2e\x33\x31\x33\x37\x20\x33\x30\x20\x32\x37\x43\x33\x30\x20\x32\ +\x33\x2e\x36\x38\x36\x33\x20\x32\x37\x2e\x33\x31\x33\x37\x20\x32\ +\x31\x20\x32\x34\x20\x32\x31\x43\x32\x30\x2e\x36\x38\x36\x33\x20\ +\x32\x31\x20\x31\x38\x20\x32\x33\x2e\x36\x38\x36\x33\x20\x31\x38\ +\x20\x32\x37\x43\x31\x38\x20\x33\x30\x2e\x33\x31\x33\x37\x20\x32\ +\x30\x2e\x36\x38\x36\x33\x20\x33\x33\x20\x32\x34\x20\x33\x33\x5a\ +\x22\x20\x66\x69\x6c\x6c\x3d\x22\x6e\x6f\x6e\x65\x22\x20\x73\x74\ +\x72\x6f\x6b\x65\x3d\x22\x23\x30\x30\x36\x34\x66\x66\x22\x20\x73\ +\x74\x72\x6f\x6b\x65\x2d\x77\x69\x64\x74\x68\x3d\x22\x33\x22\x20\ +\x73\x74\x72\x6f\x6b\x65\x2d\x6c\x69\x6e\x65\x6a\x6f\x69\x6e\x3d\ +\x22\x6d\x69\x74\x65\x72\x22\x2f\x3e\x3c\x70\x61\x74\x68\x20\x64\ +\x3d\x22\x4d\x31\x33\x2e\x32\x36\x33\x37\x20\x31\x31\x2e\x32\x36\ +\x36\x31\x4c\x31\x35\x2e\x38\x35\x38\x32\x20\x31\x34\x2e\x38\x38\ +\x36\x33\x22\x20\x73\x74\x72\x6f\x6b\x65\x3d\x22\x23\x30\x30\x36\ +\x34\x66\x66\x22\x20\x73\x74\x72\x6f\x6b\x65\x2d\x77\x69\x64\x74\ +\x68\x3d\x22\x33\x22\x20\x73\x74\x72\x6f\x6b\x65\x2d\x6c\x69\x6e\ +\x65\x63\x61\x70\x3d\x22\x73\x71\x75\x61\x72\x65\x22\x2f\x3e\x3c\ +\x70\x61\x74\x68\x20\x64\x3d\x22\x4d\x33\x35\x2e\x36\x32\x35\x20\ +\x31\x31\x2e\x37\x31\x30\x34\x4c\x33\x33\x2e\x30\x33\x30\x34\x20\ +\x31\x35\x2e\x33\x33\x30\x37\x22\x20\x73\x74\x72\x6f\x6b\x65\x3d\ +\x22\x23\x30\x30\x36\x34\x66\x66\x22\x20\x73\x74\x72\x6f\x6b\x65\ +\x2d\x77\x69\x64\x74\x68\x3d\x22\x33\x22\x20\x73\x74\x72\x6f\x6b\ +\x65\x2d\x6c\x69\x6e\x65\x63\x61\x70\x3d\x22\x73\x71\x75\x61\x72\ +\x65\x22\x2f\x3e\x3c\x70\x61\x74\x68\x20\x64\x3d\x22\x4d\x32\x34\ +\x2e\x30\x30\x38\x38\x20\x37\x56\x31\x33\x22\x20\x73\x74\x72\x6f\ +\x6b\x65\x3d\x22\x23\x30\x30\x36\x34\x66\x66\x22\x20\x73\x74\x72\ +\x6f\x6b\x65\x2d\x77\x69\x64\x74\x68\x3d\x22\x33\x22\x20\x73\x74\ +\x72\x6f\x6b\x65\x2d\x6c\x69\x6e\x65\x63\x61\x70\x3d\x22\x73\x71\ +\x75\x61\x72\x65\x22\x2f\x3e\x3c\x2f\x73\x76\x67\x3e\ +\x00\x00\x01\xe4\ +\x3c\ +\x3f\x78\x6d\x6c\x20\x76\x65\x72\x73\x69\x6f\x6e\x3d\x22\x31\x2e\ +\x30\x22\x20\x65\x6e\x63\x6f\x64\x69\x6e\x67\x3d\x22\x55\x54\x46\ +\x2d\x38\x22\x3f\x3e\x3c\x73\x76\x67\x20\x77\x69\x64\x74\x68\x3d\ +\x22\x33\x32\x22\x20\x68\x65\x69\x67\x68\x74\x3d\x22\x33\x32\x22\ +\x20\x76\x69\x65\x77\x42\x6f\x78\x3d\x22\x30\x20\x30\x20\x34\x38\ +\x20\x34\x38\x22\x20\x66\x69\x6c\x6c\x3d\x22\x6e\x6f\x6e\x65\x22\ +\x20\x78\x6d\x6c\x6e\x73\x3d\x22\x68\x74\x74\x70\x3a\x2f\x2f\x77\ +\x77\x77\x2e\x77\x33\x2e\x6f\x72\x67\x2f\x32\x30\x30\x30\x2f\x73\ +\x76\x67\x22\x3e\x3c\x70\x61\x74\x68\x20\x66\x69\x6c\x6c\x2d\x72\ +\x75\x6c\x65\x3d\x22\x65\x76\x65\x6e\x6f\x64\x64\x22\x20\x63\x6c\ 
+\x69\x70\x2d\x72\x75\x6c\x65\x3d\x22\x65\x76\x65\x6e\x6f\x64\x64\ +\x22\x20\x64\x3d\x22\x4d\x34\x34\x20\x34\x30\x2e\x38\x33\x36\x31\ +\x43\x33\x39\x2e\x31\x30\x36\x39\x20\x33\x34\x2e\x38\x36\x33\x32\ +\x20\x33\x34\x2e\x37\x36\x31\x37\x20\x33\x31\x2e\x34\x37\x33\x39\ +\x20\x33\x30\x2e\x39\x36\x34\x34\x20\x33\x30\x2e\x36\x36\x38\x32\ +\x43\x32\x37\x2e\x31\x36\x37\x31\x20\x32\x39\x2e\x38\x36\x32\x35\ +\x20\x32\x33\x2e\x35\x35\x31\x37\x20\x32\x39\x2e\x37\x34\x30\x38\ +\x20\x32\x30\x2e\x31\x31\x38\x32\x20\x33\x30\x2e\x33\x30\x33\x56\ +\x34\x31\x4c\x34\x20\x32\x33\x2e\x35\x34\x35\x33\x4c\x32\x30\x2e\ +\x31\x31\x38\x32\x20\x37\x56\x31\x37\x2e\x31\x36\x37\x43\x32\x36\ +\x2e\x34\x36\x36\x37\x20\x31\x37\x2e\x32\x31\x37\x32\x20\x33\x31\ +\x2e\x38\x36\x33\x38\x20\x31\x39\x2e\x34\x39\x34\x38\x20\x33\x36\ +\x2e\x33\x30\x39\x35\x20\x32\x34\x43\x34\x30\x2e\x37\x35\x35\x33\ +\x20\x32\x38\x2e\x35\x30\x35\x32\x20\x34\x33\x2e\x33\x31\x38\x37\ +\x20\x33\x34\x2e\x31\x31\x37\x32\x20\x34\x34\x20\x34\x30\x2e\x38\ +\x33\x36\x31\x5a\x22\x20\x66\x69\x6c\x6c\x3d\x22\x6e\x6f\x6e\x65\ +\x22\x20\x73\x74\x72\x6f\x6b\x65\x3d\x22\x23\x30\x30\x36\x34\x66\ +\x66\x22\x20\x73\x74\x72\x6f\x6b\x65\x2d\x77\x69\x64\x74\x68\x3d\ +\x22\x33\x22\x20\x73\x74\x72\x6f\x6b\x65\x2d\x6c\x69\x6e\x65\x6a\ +\x6f\x69\x6e\x3d\x22\x6d\x69\x74\x65\x72\x22\x2f\x3e\x3c\x2f\x73\ +\x76\x67\x3e\ +\x00\x00\x03\x57\ +\x89\ +\x50\x4e\x47\x0d\x0a\x1a\x0a\x00\x00\x00\x0d\x49\x48\x44\x52\x00\ +\x00\x00\x30\x00\x00\x00\x30\x08\x06\x00\x00\x00\x57\x02\xf9\x87\ +\x00\x00\x00\x06\x62\x4b\x47\x44\x00\xff\x00\xff\x00\xff\xa0\xbd\ +\xa7\x93\x00\x00\x00\x09\x70\x48\x59\x73\x00\x00\x0d\xd7\x00\x00\ +\x0d\xd7\x01\x42\x28\x9b\x78\x00\x00\x00\x07\x74\x49\x4d\x45\x07\ +\xe7\x04\x13\x03\x14\x20\x67\x3f\x96\xef\x00\x00\x02\xe4\x49\x44\ +\x41\x54\x68\x43\xed\x58\x3b\x8b\x15\x31\x18\x3d\xd7\x07\xae\x95\ +\x62\x25\xb6\xa2\x8d\x0f\x2c\xc4\x46\x10\x41\x41\xf4\x07\x58\x59\ +\x89\x28\x58\x5a\x2c\xba\x6b\x71\x41\x57\xc4\xc2\xce\x42\xb9\x68\ +\x6f\x69\x23\x88\x8d\x62\x6f\x21\x82\x22\x5a\xa8\x20\x56\x6b\x25\ +\x17\x5f\xf1\x7c\x3b\xe6\x92\xc9\x24\x33\xc9\x24\xb3\xc3\xe2\x64\ +\x77\xd8\x9b\xef\x79\xce\x97\x2f\x99\xec\x05\x86\x31\x54\x60\xa8\ +\xc0\x50\x81\x3e\x2b\x30\x72\x25\xbf\x06\xec\x5c\x0f\x1c\x57\xc0\ +\x56\x97\xbe\x07\xd9\xf2\x3a\xe0\xe9\x15\xe0\x83\x9d\xbb\x42\x60\ +\x09\x38\x4b\xa3\x3b\x7c\xe6\x6c\xe3\x9e\xe7\x53\x82\xbd\xb8\x00\ +\x3c\x30\x71\x94\x08\x48\xe5\xc9\xf4\x35\x0d\x36\xf5\x0c\xd6\x97\ +\x7e\x4a\x7c\x7b\xcc\x95\xd8\x60\x5a\x52\x79\x4c\x83\x67\xfb\xbc\ +\x24\xbb\x27\xbe\x48\xab\x2c\x3f\xc1\x7c\x07\xf8\xcc\xfd\x61\x6b\ +\xf3\xef\x3d\x9d\xbf\x44\x80\xc2\x1d\x5a\x41\xf0\x8f\x16\x81\xf1\ +\x2a\x03\x75\xa6\x63\x5b\x4f\xa9\x10\x02\x32\x66\x18\x65\xc2\xa2\ +\xaf\xed\xb1\xe6\x09\xd8\x2d\x14\xb4\x1c\xdc\x1f\xfc\x2d\x06\x5b\ +\xad\x72\x92\x05\x05\xc9\x64\x14\xbd\x02\x26\x78\xc1\x60\xcf\x33\ +\xe1\x0a\x0e\x13\x45\xc0\x07\xd6\x27\x0f\x46\x91\x60\x18\x45\x20\ +\x21\x4f\x67\xae\xff\x17\x01\xdf\x86\xf5\xc9\x3b\x2b\xbb\x11\x38\ +\x7a\x05\x6c\xb0\xf6\xdc\x06\xcd\xfd\x41\x13\x25\xdb\x44\x3d\x04\ +\x78\x47\xcc\x3b\xa2\x09\x48\x7a\x01\xad\x9f\x26\x38\x23\x28\xbe\ +\xfd\x8b\x71\x1a\xea\xd7\x2d\x60\x7b\x93\x4f\x8c\xbe\x15\x81\xf0\ +\x04\x52\xf9\xf2\x98\x87\xfa\x72\x03\x38\x6c\xcb\xdb\xce\x5b\x11\ +\x18\xcb\x15\x44\x15\x3f\x2b\x9f\x9d\xa3\x0a\x5e\x9b\x2d\x40\xbd\ +\x90\x96\x2a\xda\xcb\xe9\x1c\x2c\xf4\x24\xf7\xfb\x0b\xe0\xb1\xc2\ +\x6f\x6d\x21\x9f\x29\xb3\xfe\xf1\xf1\x83\x37\x23\x4b\x7b\xdd\x04\ +\xb6\xf8\xb3\x35\x6b\xa2\x08\x48\xc5\x4c\xf0\x06\x89\x65\xde\x18\ 
+\xf7\x16\xf3\x30\xf0\xda\xf7\x32\xd4\xb7\x62\x93\x37\x83\x75\x59\ +\x04\x13\x18\xb3\xf2\x23\x85\xd9\x86\xb4\x83\x2d\x2a\xbc\x4a\x01\ +\xd2\xd6\x37\x88\x80\x80\x77\x55\xde\x26\x01\x95\x7a\xaf\x53\xea\ +\x36\xb0\xb9\x12\xb7\x46\xd0\x48\x20\x18\xbc\x4e\x92\x48\xe2\x12\ +\xd4\x77\x9e\x52\xbb\x6b\x30\x97\x54\xb5\x04\xa2\xc1\x67\x22\xc1\ +\x53\xea\x6d\x68\x4b\x79\x09\xf8\x36\x6c\x68\x65\xd2\xdb\x49\x32\ +\x29\x75\x17\xd8\x58\x97\xd3\x4b\x80\xdd\x7c\xaa\xce\x31\x48\x97\ +\xd8\x4e\x92\xe3\x02\xd4\x0f\x82\xf4\x7e\x3f\xe5\x25\x40\xdf\x83\ +\x41\x20\x9b\x8c\x32\x90\xe0\xd1\x77\xde\x97\xa6\x44\x20\xc7\x9b\ +\xd1\x99\x28\x9d\xc4\xec\x64\xb2\x2b\x5e\x9a\xb3\x6d\xf6\x39\x01\ +\xe4\x10\xa6\x93\xd0\x28\xf6\x9b\x70\x6c\x42\xdb\x0c\xe5\x9b\x1c\ +\xb8\x4b\x31\xda\x93\xf8\xac\xe3\xb0\x9d\x4c\x8c\xbe\x8b\xd8\x8a\ +\xf9\xd7\xec\x04\x24\x60\x3b\x12\xef\x7d\x58\xec\x15\xf0\xd9\xe5\ +\x95\xb7\x23\xe1\xc4\xd0\x0f\x81\xf6\x2b\x51\x21\x61\x7f\xb1\xf5\ +\xc9\xb0\x38\xb4\x34\xc2\xe3\x8a\x47\x56\x41\xf0\xdd\xe9\xa8\x4e\ +\x4b\x8f\x8f\x26\x84\x12\x01\xbe\xf2\x26\x3f\x81\x33\xff\x0c\xe4\ +\xe8\x3a\x99\x15\x6f\x86\x60\xdc\xc4\x13\x33\x4c\xa9\x85\xe6\x81\ +\x67\x14\xdc\xcf\x90\xa7\xab\x10\x93\xab\xc0\x73\x33\xb8\x73\x0d\ +\xaf\x03\x47\x48\xe4\x1c\x5f\x6c\xbb\xba\x42\x12\x13\x97\x20\xdf\ +\x49\xe5\x6d\xf0\x31\x31\x06\xdb\xa1\x02\x43\x05\x86\x0a\x74\x53\ +\x81\xbf\x22\x47\x9c\xdf\xb5\xb4\xa1\x9c\x00\x00\x00\x00\x49\x45\ +\x4e\x44\xae\x42\x60\x82\ +\x00\x00\x01\x10\ +\x00\ +\x00\x04\x42\x78\x9c\xc5\x93\x51\x6e\x83\x30\x0c\x86\xaf\x62\x79\ +\xcf\x25\x29\xa0\x0a\x4d\x84\x4a\x7d\xe8\x09\xb6\x03\x54\x34\x40\ +\x36\x9a\x74\x21\x23\x70\xfb\x39\x1b\x74\xe3\x71\x5a\xc5\xa4\x48\ +\xb1\xad\xe8\xfb\x13\xfb\x4f\xbe\x1f\x2e\x2d\xf4\xd2\x76\xca\x68\ +\x81\xdb\x88\x23\x48\x5d\x9a\xb3\xd2\xb5\xc0\xe7\xa7\xe3\x26\xc3\ +\x7d\x91\x77\x7d\x0d\x5e\x9d\x5d\x23\x30\x89\x11\x1a\xa9\xea\xc6\ +\x7d\xc5\xbd\x92\xfe\x60\x06\x81\x1c\x38\xa4\x19\x2d\x84\x4a\xb5\ +\xad\x40\x6d\xb4\x44\x20\xbe\xee\x04\x36\xce\x5d\x1f\x19\xf3\xde\ +\x47\x3e\x89\x8c\xad\x59\xcc\x39\x67\x04\xc6\x22\x6f\x95\x96\x30\ +\x6c\x49\x3f\x45\x18\x69\x27\xc6\x10\x13\x3f\xa4\xf1\x67\xda\x39\ +\x6b\x5e\xa5\xc0\x07\xce\x77\x69\x55\xcd\x85\xcd\x7c\xab\x5b\x21\ +\xb0\xca\xd3\x55\x60\xf7\xf6\x7e\xb2\x12\xd9\xfa\xf8\x94\x2f\xf9\ +\x21\xff\x8b\x80\x95\xa5\x03\xea\x70\xb2\x23\x1e\xe1\x70\x9e\x45\ +\xf6\x3d\x0a\x0a\x2d\x1d\x89\x97\xcd\xff\x95\xea\x8b\x51\x64\x81\ +\x8b\x72\xd2\xfe\x54\x4d\xff\x43\x74\x7a\x6a\xd8\xd6\x7f\xeb\x5a\ +\xaa\x37\xe3\x04\x83\x8c\x93\x81\x86\xc9\x30\xe3\x64\xa0\xbb\x38\ +\x33\x5b\xf2\xb3\xfb\xe0\xc3\xef\x2d\x3e\x00\x2f\xe9\x4b\x1d\ +\x00\x00\x01\x7a\ +\x3c\ +\x3f\x78\x6d\x6c\x20\x76\x65\x72\x73\x69\x6f\x6e\x3d\x22\x31\x2e\ +\x30\x22\x20\x65\x6e\x63\x6f\x64\x69\x6e\x67\x3d\x22\x55\x54\x46\ +\x2d\x38\x22\x3f\x3e\x3c\x73\x76\x67\x20\x77\x69\x64\x74\x68\x3d\ +\x22\x33\x32\x22\x20\x68\x65\x69\x67\x68\x74\x3d\x22\x33\x32\x22\ +\x20\x76\x69\x65\x77\x42\x6f\x78\x3d\x22\x30\x20\x30\x20\x34\x38\ +\x20\x34\x38\x22\x20\x66\x69\x6c\x6c\x3d\x22\x6e\x6f\x6e\x65\x22\ +\x20\x78\x6d\x6c\x6e\x73\x3d\x22\x68\x74\x74\x70\x3a\x2f\x2f\x77\ +\x77\x77\x2e\x77\x33\x2e\x6f\x72\x67\x2f\x32\x30\x30\x30\x2f\x73\ +\x76\x67\x22\x3e\x3c\x70\x61\x74\x68\x20\x64\x3d\x22\x4d\x37\x20\ +\x34\x32\x48\x34\x33\x22\x20\x73\x74\x72\x6f\x6b\x65\x3d\x22\x23\ +\x30\x30\x36\x34\x66\x66\x22\x20\x73\x74\x72\x6f\x6b\x65\x2d\x77\ +\x69\x64\x74\x68\x3d\x22\x33\x22\x20\x73\x74\x72\x6f\x6b\x65\x2d\ +\x6c\x69\x6e\x65\x63\x61\x70\x3d\x22\x73\x71\x75\x61\x72\x65\x22\ 
+\x20\x73\x74\x72\x6f\x6b\x65\x2d\x6c\x69\x6e\x65\x6a\x6f\x69\x6e\ +\x3d\x22\x6d\x69\x74\x65\x72\x22\x2f\x3e\x3c\x70\x61\x74\x68\x20\ +\x64\x3d\x22\x4d\x31\x31\x20\x32\x36\x2e\x37\x31\x39\x39\x56\x33\ +\x34\x48\x31\x38\x2e\x33\x31\x37\x32\x4c\x33\x39\x20\x31\x33\x2e\ +\x33\x30\x38\x31\x4c\x33\x31\x2e\x36\x39\x35\x31\x20\x36\x4c\x31\ +\x31\x20\x32\x36\x2e\x37\x31\x39\x39\x5a\x22\x20\x66\x69\x6c\x6c\ +\x3d\x22\x6e\x6f\x6e\x65\x22\x20\x73\x74\x72\x6f\x6b\x65\x3d\x22\ +\x23\x30\x30\x36\x34\x66\x66\x22\x20\x73\x74\x72\x6f\x6b\x65\x2d\ +\x77\x69\x64\x74\x68\x3d\x22\x33\x22\x20\x73\x74\x72\x6f\x6b\x65\ +\x2d\x6c\x69\x6e\x65\x6a\x6f\x69\x6e\x3d\x22\x6d\x69\x74\x65\x72\ +\x22\x2f\x3e\x3c\x2f\x73\x76\x67\x3e\ +\x00\x00\x02\x48\ +\x3c\ +\x3f\x78\x6d\x6c\x20\x76\x65\x72\x73\x69\x6f\x6e\x3d\x22\x31\x2e\ +\x30\x22\x20\x65\x6e\x63\x6f\x64\x69\x6e\x67\x3d\x22\x55\x54\x46\ +\x2d\x38\x22\x3f\x3e\x3c\x73\x76\x67\x20\x77\x69\x64\x74\x68\x3d\ +\x22\x33\x32\x22\x20\x68\x65\x69\x67\x68\x74\x3d\x22\x33\x32\x22\ +\x20\x76\x69\x65\x77\x42\x6f\x78\x3d\x22\x30\x20\x30\x20\x34\x38\ +\x20\x34\x38\x22\x20\x66\x69\x6c\x6c\x3d\x22\x6e\x6f\x6e\x65\x22\ +\x20\x78\x6d\x6c\x6e\x73\x3d\x22\x68\x74\x74\x70\x3a\x2f\x2f\x77\ +\x77\x77\x2e\x77\x33\x2e\x6f\x72\x67\x2f\x32\x30\x30\x30\x2f\x73\ +\x76\x67\x22\x3e\x3c\x70\x61\x74\x68\x20\x64\x3d\x22\x4d\x32\x34\ +\x20\x34\x34\x43\x33\x35\x2e\x30\x34\x35\x37\x20\x34\x34\x20\x34\ +\x34\x20\x33\x35\x2e\x30\x34\x35\x37\x20\x34\x34\x20\x32\x34\x43\ +\x34\x34\x20\x31\x32\x2e\x39\x35\x34\x33\x20\x33\x35\x2e\x30\x34\ +\x35\x37\x20\x34\x20\x32\x34\x20\x34\x43\x31\x32\x2e\x39\x35\x34\ +\x33\x20\x34\x20\x34\x20\x31\x32\x2e\x39\x35\x34\x33\x20\x34\x20\ +\x32\x34\x43\x34\x20\x33\x35\x2e\x30\x34\x35\x37\x20\x31\x32\x2e\ +\x39\x35\x34\x33\x20\x34\x34\x20\x32\x34\x20\x34\x34\x5a\x22\x20\ +\x66\x69\x6c\x6c\x3d\x22\x6e\x6f\x6e\x65\x22\x20\x73\x74\x72\x6f\ +\x6b\x65\x3d\x22\x23\x30\x30\x36\x34\x66\x66\x22\x20\x73\x74\x72\ +\x6f\x6b\x65\x2d\x77\x69\x64\x74\x68\x3d\x22\x33\x22\x20\x73\x74\ +\x72\x6f\x6b\x65\x2d\x6c\x69\x6e\x65\x6a\x6f\x69\x6e\x3d\x22\x6d\ +\x69\x74\x65\x72\x22\x2f\x3e\x3c\x70\x61\x74\x68\x20\x64\x3d\x22\ +\x4d\x32\x39\x2e\x36\x35\x36\x37\x20\x31\x38\x2e\x33\x34\x33\x32\ +\x4c\x31\x38\x2e\x33\x34\x33\x20\x32\x39\x2e\x36\x35\x36\x39\x22\ +\x20\x73\x74\x72\x6f\x6b\x65\x3d\x22\x23\x30\x30\x36\x34\x66\x66\ +\x22\x20\x73\x74\x72\x6f\x6b\x65\x2d\x77\x69\x64\x74\x68\x3d\x22\ +\x33\x22\x20\x73\x74\x72\x6f\x6b\x65\x2d\x6c\x69\x6e\x65\x63\x61\ +\x70\x3d\x22\x73\x71\x75\x61\x72\x65\x22\x20\x73\x74\x72\x6f\x6b\ +\x65\x2d\x6c\x69\x6e\x65\x6a\x6f\x69\x6e\x3d\x22\x6d\x69\x74\x65\ +\x72\x22\x2f\x3e\x3c\x70\x61\x74\x68\x20\x64\x3d\x22\x4d\x31\x38\ +\x2e\x33\x34\x33\x33\x20\x31\x38\x2e\x33\x34\x33\x32\x4c\x32\x39\ +\x2e\x36\x35\x37\x20\x32\x39\x2e\x36\x35\x36\x39\x22\x20\x73\x74\ +\x72\x6f\x6b\x65\x3d\x22\x23\x30\x30\x36\x34\x66\x66\x22\x20\x73\ +\x74\x72\x6f\x6b\x65\x2d\x77\x69\x64\x74\x68\x3d\x22\x33\x22\x20\ +\x73\x74\x72\x6f\x6b\x65\x2d\x6c\x69\x6e\x65\x63\x61\x70\x3d\x22\ +\x73\x71\x75\x61\x72\x65\x22\x20\x73\x74\x72\x6f\x6b\x65\x2d\x6c\ +\x69\x6e\x65\x6a\x6f\x69\x6e\x3d\x22\x6d\x69\x74\x65\x72\x22\x2f\ +\x3e\x3c\x2f\x73\x76\x67\x3e\ +\x00\x00\x03\x06\ +\x3c\ +\x3f\x78\x6d\x6c\x20\x76\x65\x72\x73\x69\x6f\x6e\x3d\x22\x31\x2e\ +\x30\x22\x20\x65\x6e\x63\x6f\x64\x69\x6e\x67\x3d\x22\x55\x54\x46\ +\x2d\x38\x22\x3f\x3e\x3c\x73\x76\x67\x20\x77\x69\x64\x74\x68\x3d\ +\x22\x33\x32\x22\x20\x68\x65\x69\x67\x68\x74\x3d\x22\x33\x32\x22\ +\x20\x76\x69\x65\x77\x42\x6f\x78\x3d\x22\x30\x20\x30\x20\x34\x38\ 
+\x20\x34\x38\x22\x20\x66\x69\x6c\x6c\x3d\x22\x6e\x6f\x6e\x65\x22\ +\x20\x78\x6d\x6c\x6e\x73\x3d\x22\x68\x74\x74\x70\x3a\x2f\x2f\x77\ +\x77\x77\x2e\x77\x33\x2e\x6f\x72\x67\x2f\x32\x30\x30\x30\x2f\x73\ +\x76\x67\x22\x3e\x3c\x70\x61\x74\x68\x20\x66\x69\x6c\x6c\x2d\x72\ +\x75\x6c\x65\x3d\x22\x65\x76\x65\x6e\x6f\x64\x64\x22\x20\x63\x6c\ +\x69\x70\x2d\x72\x75\x6c\x65\x3d\x22\x65\x76\x65\x6e\x6f\x64\x64\ +\x22\x20\x64\x3d\x22\x4d\x32\x34\x20\x34\x34\x43\x33\x35\x2e\x30\ +\x34\x35\x37\x20\x34\x34\x20\x34\x34\x20\x33\x35\x2e\x30\x34\x35\ +\x37\x20\x34\x34\x20\x32\x34\x43\x34\x34\x20\x31\x32\x2e\x39\x35\ +\x34\x33\x20\x33\x35\x2e\x30\x34\x35\x37\x20\x34\x20\x32\x34\x20\ +\x34\x43\x31\x32\x2e\x39\x35\x34\x33\x20\x34\x20\x34\x20\x31\x32\ +\x2e\x39\x35\x34\x33\x20\x34\x20\x32\x34\x43\x34\x20\x33\x35\x2e\ +\x30\x34\x35\x37\x20\x31\x32\x2e\x39\x35\x34\x33\x20\x34\x34\x20\ +\x32\x34\x20\x34\x34\x5a\x22\x20\x73\x74\x72\x6f\x6b\x65\x3d\x22\ +\x23\x30\x30\x36\x34\x66\x66\x22\x20\x73\x74\x72\x6f\x6b\x65\x2d\ +\x77\x69\x64\x74\x68\x3d\x22\x33\x22\x20\x73\x74\x72\x6f\x6b\x65\ +\x2d\x6c\x69\x6e\x65\x63\x61\x70\x3d\x22\x73\x71\x75\x61\x72\x65\ +\x22\x20\x73\x74\x72\x6f\x6b\x65\x2d\x6c\x69\x6e\x65\x6a\x6f\x69\ +\x6e\x3d\x22\x6d\x69\x74\x65\x72\x22\x2f\x3e\x3c\x70\x61\x74\x68\ +\x20\x64\x3d\x22\x4d\x32\x34\x20\x32\x33\x43\x32\x36\x2e\x37\x36\ +\x31\x34\x20\x32\x33\x20\x32\x39\x20\x32\x30\x2e\x37\x36\x31\x34\ +\x20\x32\x39\x20\x31\x38\x43\x32\x39\x20\x31\x35\x2e\x32\x33\x38\ +\x36\x20\x32\x36\x2e\x37\x36\x31\x34\x20\x31\x33\x20\x32\x34\x20\ +\x31\x33\x43\x32\x31\x2e\x32\x33\x38\x36\x20\x31\x33\x20\x31\x39\ +\x20\x31\x35\x2e\x32\x33\x38\x36\x20\x31\x39\x20\x31\x38\x43\x31\ +\x39\x20\x32\x30\x2e\x37\x36\x31\x34\x20\x32\x31\x2e\x32\x33\x38\ +\x36\x20\x32\x33\x20\x32\x34\x20\x32\x33\x5a\x22\x20\x66\x69\x6c\ +\x6c\x3d\x22\x6e\x6f\x6e\x65\x22\x20\x73\x74\x72\x6f\x6b\x65\x3d\ +\x22\x23\x30\x30\x36\x34\x66\x66\x22\x20\x73\x74\x72\x6f\x6b\x65\ +\x2d\x77\x69\x64\x74\x68\x3d\x22\x33\x22\x20\x73\x74\x72\x6f\x6b\ +\x65\x2d\x6c\x69\x6e\x65\x6a\x6f\x69\x6e\x3d\x22\x6d\x69\x74\x65\ +\x72\x22\x2f\x3e\x3c\x70\x61\x74\x68\x20\x64\x3d\x22\x4d\x31\x30\ +\x2e\x30\x32\x32\x20\x33\x38\x2e\x33\x33\x32\x43\x31\x30\x2e\x33\ +\x36\x35\x37\x20\x33\x33\x2e\x31\x32\x30\x36\x20\x31\x34\x2e\x37\ +\x30\x31\x36\x20\x32\x39\x20\x32\x30\x20\x32\x39\x48\x32\x38\x43\ +\x33\x33\x2e\x32\x39\x31\x34\x20\x32\x39\x20\x33\x37\x2e\x36\x32\ +\x32\x39\x20\x33\x33\x2e\x31\x30\x39\x37\x20\x33\x37\x2e\x39\x37\ +\x36\x37\x20\x33\x38\x2e\x33\x31\x31\x33\x22\x20\x73\x74\x72\x6f\ +\x6b\x65\x3d\x22\x23\x30\x30\x36\x34\x66\x66\x22\x20\x73\x74\x72\ +\x6f\x6b\x65\x2d\x77\x69\x64\x74\x68\x3d\x22\x33\x22\x20\x73\x74\ +\x72\x6f\x6b\x65\x2d\x6c\x69\x6e\x65\x63\x61\x70\x3d\x22\x73\x71\ +\x75\x61\x72\x65\x22\x20\x73\x74\x72\x6f\x6b\x65\x2d\x6c\x69\x6e\ +\x65\x6a\x6f\x69\x6e\x3d\x22\x6d\x69\x74\x65\x72\x22\x2f\x3e\x3c\ +\x2f\x73\x76\x67\x3e\ +\x00\x00\x01\xc6\ +\x3c\ +\x3f\x78\x6d\x6c\x20\x76\x65\x72\x73\x69\x6f\x6e\x3d\x22\x31\x2e\ +\x30\x22\x20\x65\x6e\x63\x6f\x64\x69\x6e\x67\x3d\x22\x55\x54\x46\ +\x2d\x38\x22\x3f\x3e\x3c\x73\x76\x67\x20\x77\x69\x64\x74\x68\x3d\ +\x22\x33\x32\x22\x20\x68\x65\x69\x67\x68\x74\x3d\x22\x33\x32\x22\ +\x20\x76\x69\x65\x77\x42\x6f\x78\x3d\x22\x30\x20\x30\x20\x34\x38\ +\x20\x34\x38\x22\x20\x66\x69\x6c\x6c\x3d\x22\x6e\x6f\x6e\x65\x22\ +\x20\x78\x6d\x6c\x6e\x73\x3d\x22\x68\x74\x74\x70\x3a\x2f\x2f\x77\ +\x77\x77\x2e\x77\x33\x2e\x6f\x72\x67\x2f\x32\x30\x30\x30\x2f\x73\ +\x76\x67\x22\x3e\x3c\x70\x61\x74\x68\x20\x64\x3d\x22\x4d\x34\x20\ 
+\x39\x56\x34\x31\x4c\x39\x20\x32\x31\x48\x33\x39\x2e\x35\x56\x31\ +\x35\x43\x33\x39\x2e\x35\x20\x31\x33\x2e\x38\x39\x35\x34\x20\x33\ +\x38\x2e\x36\x30\x34\x36\x20\x31\x33\x20\x33\x37\x2e\x35\x20\x31\ +\x33\x48\x32\x34\x4c\x31\x39\x20\x37\x48\x36\x43\x34\x2e\x38\x39\ +\x35\x34\x33\x20\x37\x20\x34\x20\x37\x2e\x38\x39\x35\x34\x33\x20\ +\x34\x20\x39\x5a\x22\x20\x73\x74\x72\x6f\x6b\x65\x3d\x22\x23\x30\ +\x30\x36\x34\x66\x66\x22\x20\x73\x74\x72\x6f\x6b\x65\x2d\x77\x69\ +\x64\x74\x68\x3d\x22\x33\x22\x20\x73\x74\x72\x6f\x6b\x65\x2d\x6c\ +\x69\x6e\x65\x63\x61\x70\x3d\x22\x73\x71\x75\x61\x72\x65\x22\x20\ +\x73\x74\x72\x6f\x6b\x65\x2d\x6c\x69\x6e\x65\x6a\x6f\x69\x6e\x3d\ +\x22\x6d\x69\x74\x65\x72\x22\x2f\x3e\x3c\x70\x61\x74\x68\x20\x64\ +\x3d\x22\x4d\x34\x30\x20\x34\x31\x4c\x34\x34\x20\x32\x31\x48\x38\ +\x2e\x38\x31\x32\x35\x4c\x34\x20\x34\x31\x48\x34\x30\x5a\x22\x20\ +\x66\x69\x6c\x6c\x3d\x22\x6e\x6f\x6e\x65\x22\x20\x73\x74\x72\x6f\ +\x6b\x65\x3d\x22\x23\x30\x30\x36\x34\x66\x66\x22\x20\x73\x74\x72\ +\x6f\x6b\x65\x2d\x77\x69\x64\x74\x68\x3d\x22\x33\x22\x20\x73\x74\ +\x72\x6f\x6b\x65\x2d\x6c\x69\x6e\x65\x63\x61\x70\x3d\x22\x73\x71\ +\x75\x61\x72\x65\x22\x20\x73\x74\x72\x6f\x6b\x65\x2d\x6c\x69\x6e\ +\x65\x6a\x6f\x69\x6e\x3d\x22\x6d\x69\x74\x65\x72\x22\x2f\x3e\x3c\ +\x2f\x73\x76\x67\x3e\ +\x00\x00\x06\x28\ +\x00\ +\x00\x15\xb6\x78\x9c\xed\x58\x4b\x6f\x1b\x55\x14\x3e\x1e\xcf\xd8\ +\x5e\x40\x95\xa2\x4a\x6c\x90\x1a\x81\x40\xdd\x20\xaa\x82\xd8\x20\ +\xa1\x6c\x40\x6c\x40\x65\xd1\x56\x6a\x41\x8a\xda\xc4\xb1\x13\x37\ +\x71\x92\x26\xe4\xe1\x24\xe3\x84\x1f\x90\x0a\xba\xa8\x5a\xa1\xb0\ +\x40\x42\x48\x88\x2c\x2a\xba\x01\xe1\x1d\x20\x2a\x6a\x21\x84\x8a\ +\x08\xf1\xf8\x11\xe7\x65\xd7\x93\xc4\x8f\x38\xb6\x7a\xf9\xce\x8c\ +\xc7\x8d\x5d\xdb\x6d\x61\xd1\x05\xb9\xd2\xa7\xf1\x3c\xce\x7c\xdf\ +\x39\xf7\xde\x73\xce\x98\xc8\x46\x12\xb5\xb7\x13\x8e\xed\x74\xec\ +\x30\xd1\x4b\x44\xd4\xd6\x66\x9e\x7b\x64\xa2\xf7\x70\xed\x18\xae\ +\xe1\x11\xea\x20\xf3\xba\x31\xda\xe8\x60\x1c\x8c\x83\xd1\x62\x14\ +\x54\x6a\x2f\xcd\x51\x07\x63\x4f\xa5\xe3\xff\xf5\x7d\x99\x31\x6a\ +\xcf\x8c\xc9\x1d\x8c\xec\x44\xf3\xf7\x31\x57\x69\x96\x42\x80\xa8\ +\x43\xf8\xdf\xe8\x58\xf5\xbb\xda\x37\x86\xe4\x50\xfa\x92\x5d\xdc\ +\x1d\xb1\x0b\xfd\x23\x49\xec\x4c\x48\x22\x1f\xb0\x69\x85\x80\x91\ +\x16\x6a\xb8\xf7\x82\xa4\x03\xa2\x14\x7c\x80\x9f\xa1\x97\xe7\xe8\ +\xe4\xa3\x72\x27\xfc\xca\xf1\xf5\x41\x59\xdf\x18\x94\xc5\xe6\x90\ +\x5c\xc3\x9f\x0b\xd8\x44\x7e\xca\x26\x10\xe7\xaa\x86\xa2\x6a\xf8\ +\x28\x98\x7f\xaf\x31\x3f\xeb\xd2\x1f\x25\x0e\x11\x3f\xb5\xad\xfa\ +\x15\x6d\x6d\x40\x11\xeb\x7e\x93\x3f\x33\x6a\xf1\xdb\x0c\xfe\xdd\ +\x19\x83\x47\xb3\x7c\xdf\x9d\xc6\x39\xf8\x8b\xa6\x06\xbd\x18\x24\ +\x75\x6f\x96\x3a\xeb\xe7\x03\xf7\xc2\x42\x6d\x9d\x45\x93\xfd\xf2\ +\x62\xb2\x5f\x11\x0c\xe6\x4f\x0d\xdb\x01\x79\x1e\x1a\x4e\x6e\x8f\ +\x4b\x61\xe6\x2f\x4c\x9b\x71\xe6\x98\xe2\xb7\xca\xfc\x0c\x83\x5f\ +\x25\x7f\xcd\xdc\xcc\xd1\x42\x8d\x06\x95\xe6\x9b\xc6\xbd\x57\xe9\ +\x4c\xf8\x1c\x82\xb1\x72\xd1\xd4\xb0\x31\xa4\x74\x5a\xf7\x33\xd0\ +\x0e\x7e\x9d\xfd\xaf\xc4\x54\xcd\x06\x28\x94\x0b\x10\xd6\x05\x89\ +\xc2\x14\x89\x7a\xff\xf8\x9c\xd7\xa0\xc5\xcf\xb6\xd9\x06\xf3\xc0\ +\x71\x8f\xf7\x39\xf4\x58\xaf\xc5\xef\x10\x49\xbf\xf2\x80\xd6\x5c\ +\x40\x5a\x28\x4c\xdb\x2c\x7f\x42\xcc\xbf\x33\x41\xc2\xd2\xd0\xc8\ +\x2f\x63\x6f\x04\xab\x73\x80\xf8\xd9\x42\xf5\xcf\xc4\xfa\x1c\x0b\ +\xcc\xcd\x88\x7a\x9d\x02\x5a\xb4\x46\xef\xca\x4d\x4b\x2a\xfb\xc0\ +\xb1\x66\xfe\x9d\x71\x03\x62\x1b\xe0\x63\xb3\xd8\x72\xac\x2c\x0d\ +\x46\x0c\xa6\xa8\x1a\xd7\xb8\xcf\xd5\xa1\x79\x9c\xc2\x00\xb8\x0d\ 
+\x7e\x5c\x6b\xc6\x9f\x87\xff\x16\xbf\x3e\x46\xa1\xcc\x08\xf8\xc7\ +\x48\xe8\xa3\xcd\xf9\x2b\x1a\xc2\x96\x86\xec\xa4\x54\xf5\x0f\xbc\ +\x4b\xcc\x1d\xe9\x31\x35\x44\x3c\xce\xa6\x6b\x64\x27\x00\x7e\xec\ +\xbf\xca\x3e\x0b\xa5\x47\x68\xfe\xee\x25\x12\xe9\x61\x93\x9f\xf3\ +\x55\x53\x7e\xec\xd9\x52\x25\x47\xb0\xfe\xad\x09\x49\x8d\xfb\x9c\ +\xdf\x44\xbd\x2e\xc3\xef\x0a\xbf\xce\x6b\xa1\x29\xff\xa4\x6d\x91\ +\xfd\xdf\x9d\x31\x34\x2c\x6c\xfa\xc9\x0f\x88\xd4\x20\x30\x04\xfe\ +\xe1\xd6\x79\x66\x17\xeb\xdf\xd2\x80\x7c\xb2\x15\xeb\x75\xde\x63\ +\x7e\x06\xfc\xc6\xfc\xbb\x3a\x5b\xd9\x67\x03\xb6\x30\x73\x1b\x6b\ +\x00\x73\x0a\xee\x8e\xb5\x7e\x12\xeb\x03\x24\xf8\x08\x1d\x4d\x63\ +\xc7\x83\xf7\x03\xd6\xa3\xc6\x73\xb0\x36\x20\x8b\xa8\xc7\x9c\x6f\ +\xe6\x5f\xf1\x39\x7f\x6f\x65\x6b\xec\xbf\x29\xd3\x77\x63\xfe\xe7\ +\xcc\x1c\xb8\x36\x40\x7a\xb2\x0f\xef\xbb\x48\x02\x47\xbd\xd5\x3b\ +\x78\xec\x20\x7f\x73\x2e\x8b\x7a\x1c\x15\x38\x0d\xac\xf8\x1c\x67\ +\x5a\xfa\x8e\x35\x6b\xc5\x9e\xf9\xad\xbd\x9e\xf0\xd2\x02\x20\x56\ +\x7a\x4d\xc4\x3c\xf7\xd7\x76\xa3\x01\x8d\x47\x63\x1e\x45\x44\x7b\ +\x94\xaa\x86\xa4\xcf\xf1\xd3\xc3\x74\xf3\xbe\xad\xfa\x1e\xa4\x45\ +\xeb\x7a\xdc\x43\x1d\x80\x60\x0d\x51\x37\x09\xfc\xd6\x22\x9d\xcd\ +\xd7\x90\xe6\xb6\x67\xa2\x3d\xb2\xc9\x0f\xac\xf8\x14\xb1\x3d\x21\ +\x8d\xb7\xe4\xc6\xda\xb5\xb8\x8d\x3c\x3b\x5b\xeb\x23\x7c\x0e\xc5\ +\x7a\x4c\xfe\x48\x97\x71\x5c\x68\xc8\xdd\x2d\x2d\x81\x5f\x68\x6e\ +\xd9\x00\xeb\xe0\xb9\xe0\x9c\xd6\x28\x2f\x5a\x83\x6b\x5c\xf1\x7e\ +\x8d\xd1\xea\xef\x47\xba\xe9\xb8\xd6\x6d\x72\xef\xc3\x62\xc4\x63\ +\xee\x47\xfc\x3e\x03\xee\x0c\x00\x5e\x86\xdd\xc0\xfa\x80\x74\x27\ +\x3b\x69\xe6\x6e\xec\x8d\x86\x35\x92\xf7\xd9\x3e\x6e\x51\x52\x6b\ +\xeb\x7f\x55\x43\x17\xa9\x75\xfc\x06\x34\x0b\xdd\xb6\x0a\x24\x03\ +\x31\x8f\xb4\xc4\x76\xc8\x9b\xe1\xfc\x94\x99\x17\xc1\xa1\x03\xaa\ +\xd1\x3f\x21\xc6\x80\x66\xd5\xf5\x4a\x7f\xd1\x72\x7f\x81\x6f\xa1\ +\x29\xbf\x01\x53\x43\xd4\x6d\x5b\xb2\x6c\x32\xc8\x37\xb9\x49\x0a\ +\x1b\x75\xbc\x59\xff\xc0\xb5\x63\xae\xf1\x9c\x36\xd0\xe0\x07\xf4\ +\xc6\xfc\x74\x0f\xbe\x5f\xaf\xb7\xe1\xbd\x5d\x9c\xae\xad\xd5\x75\ +\xfd\x8b\xfa\x28\xdc\x55\x0d\x58\xff\x15\x1d\x8b\xe0\x4c\x00\x4b\ +\xc0\xf5\xe5\x0b\x74\xb4\x95\x5d\xa5\x97\x9b\xaf\xf4\x2e\x21\xc4\ +\xdf\xcf\x3d\xed\xe3\x70\x1f\x8c\x83\xf1\x7f\x19\xfc\x3f\x09\x17\ +\x9a\x76\xda\xf7\x3f\x89\xfc\xc4\xe4\x3c\x74\xa0\x66\x4b\x8c\xc7\ +\x7d\x4e\x5c\x25\x05\x39\xf9\x1d\xe4\x84\xcb\xe5\x20\x7d\x0a\xbc\ +\x8b\xfb\x8e\x07\xec\x4e\x91\x1d\xdf\x6e\x6f\xa7\x2f\xc9\x97\xb7\ +\xc6\xa4\x2b\xf9\x49\x7a\x9f\x9f\x43\xaf\x74\x0a\x48\xc2\xbe\x0c\ +\x94\x80\x14\x72\x4b\x57\xbd\x96\xd5\x7e\xc7\x69\x7c\xf7\xac\xe1\ +\xbb\xb3\x8c\xef\x9d\x3d\xf4\x9a\x29\xe4\xa3\x0f\x91\x9b\xbf\x2d\ +\xce\x50\x11\xb5\x21\x58\x9e\xa5\x0f\x60\x7f\x07\xf6\x51\x1c\xdf\ +\xb0\x6c\xe3\x3e\xc7\xcb\xe8\x61\xfe\x58\x1d\x50\xf2\x9b\xc3\x72\ +\x70\x7b\xdc\xde\x8d\xba\x9a\xc6\x33\xdf\xe7\x26\x28\x8e\xef\x83\ +\x24\x6a\xdc\xf3\xfc\x2c\xf4\x9f\xc2\xf5\x2d\xbc\xef\x6b\x31\x4a\ +\x4f\x23\x5f\xba\xf0\x0d\xf0\x99\xf9\xfd\xe1\xbc\xc2\xe7\x69\x95\ +\x0e\xc1\xfe\x3b\xe4\xde\x15\xf4\xd9\x09\xf4\xd8\x89\xfc\x28\x3d\ +\x57\x89\x8f\x83\x6b\x11\xb4\x65\xf1\x0d\x76\x0e\x3d\xcc\x69\xf4\ +\xb0\x59\xe0\x76\xa2\xcf\xc9\x7f\xa9\xd1\x86\x4a\x4f\xc1\xfe\x06\ +\x38\x92\xe9\x21\xfa\x19\x3d\x72\x4a\x1f\xa1\x13\x96\xde\xdd\x39\ +\x7a\x91\x7b\xf2\xec\x94\xed\x4f\xf4\xa1\xcb\xe8\x7d\x77\x35\x8f\ +\xeb\xbc\x75\x7f\x5b\xa5\x23\xe8\xb7\x6e\x71\xed\x47\x6f\x79\x15\ +\xfd\x5d\x79\xdd\x47\x9e\xfd\xf1\xc2\xbd\xb3\x88\x55\x8e\x7b\xc8\ +\x98\xd7\x79\x33\xea\xa5\xc3\xd6\x3d\xd4\xb4\xd7\xc0\x9f\xc2\x33\ 
+\x9f\x27\xbd\x74\x32\xee\xa5\x2d\xf4\x46\x3f\x2c\x77\xd1\xb3\xd5\ +\x98\xb9\xed\x67\xd1\x8b\xe5\xe2\xbd\x4a\x69\x7d\x50\x71\xef\x9f\ +\xff\xc2\x8c\x34\x07\xdb\x6d\xcc\xdb\xb9\xbf\x2e\xd2\x21\xf4\x34\ +\x5f\xa1\xc7\x28\xa3\xa6\x5c\x8e\xb8\xe9\x18\x7a\x8e\xce\xa8\x5b\ +\x8a\xa1\x8f\x28\xa5\x86\xa4\x0c\x7a\x88\x9b\xf0\xe9\x05\xd8\xca\ +\xf0\xeb\x2d\xd8\xc6\x60\x7b\x03\xe7\xcf\xf0\x3b\x35\x37\x9d\x80\ +\xed\x6d\xa0\x8c\x5a\xb4\x09\x14\x50\x8b\xf7\xd0\x8f\x5c\x43\x7c\ +\xcf\x20\x8e\x7f\x23\x9e\xbf\xa2\xce\x7e\x89\xb9\x59\xc5\xef\xdf\ +\xf6\x3e\xa6\xd7\xf7\xfb\xbb\x7c\x9e\x5e\x81\x86\x4f\x60\x7b\x1b\ +\xf8\x91\x6b\x1c\xfb\x2c\x04\xd9\x50\xa7\xde\x44\xfd\xfa\x02\xdc\ +\xbf\x60\x8d\x5c\xc3\xfa\x78\xb5\xd1\xda\xbe\xe5\x26\x25\xe9\xa6\ +\x23\xf1\x0b\xf4\x8c\xe0\x6d\x5b\xbb\xee\x5d\xac\xb7\xd1\xda\x7e\ +\x92\xe3\x1f\x0b\x0c\xb3\x17\ +\x00\x00\x03\x2f\ +\x89\ +\x50\x4e\x47\x0d\x0a\x1a\x0a\x00\x00\x00\x0d\x49\x48\x44\x52\x00\ +\x00\x00\x30\x00\x00\x00\x30\x08\x06\x00\x00\x00\x57\x02\xf9\x87\ +\x00\x00\x00\x06\x62\x4b\x47\x44\x00\xff\x00\xff\x00\xff\xa0\xbd\ +\xa7\x93\x00\x00\x00\x09\x70\x48\x59\x73\x00\x00\x0d\xd7\x00\x00\ +\x0d\xd7\x01\x42\x28\x9b\x78\x00\x00\x00\x07\x74\x49\x4d\x45\x07\ +\xe6\x0a\x19\x03\x13\x1b\x82\xa4\x64\xac\x00\x00\x02\xbc\x49\x44\ +\x41\x54\x68\x43\xed\x58\xbf\x6b\x14\x41\x18\x7d\xab\x06\x63\xa5\ +\x58\x89\x85\x4d\xd0\xc6\x1f\xa4\x10\x0b\x0b\x09\x28\x88\xfe\x01\ +\x56\x56\x22\x0a\x96\x16\x41\x93\x14\x07\x1a\x91\x14\xe9\x52\x44\ +\x0e\xed\x2d\x6d\x04\xb1\x31\x85\xad\x85\x04\x22\xa2\x45\x14\xc4\ +\x2a\x56\xe1\x88\x3f\xc6\x37\x7b\x3b\xcb\xec\x64\x67\x77\xe6\x76\ +\x99\xdd\x23\xfb\x85\x25\x37\xf3\x7d\x33\xdf\x7b\x6f\xbe\x99\x9d\ +\x3b\xa0\xb3\x4e\x81\x4e\x81\x4e\x81\x26\x15\x88\xf2\x92\x3f\x02\ +\xa6\xf6\x03\x57\x04\x70\x24\xcf\xdf\x40\xdf\xd6\x3e\xe0\xed\x43\ +\xe0\xab\x99\x7b\x17\x81\x45\xe0\x16\x83\x56\xf8\x4c\x9a\xc1\x0d\ +\xb7\x07\x04\x7b\x6f\x0e\x78\xa1\xe3\xc8\x10\x90\xca\x93\xe9\x3a\ +\x03\x0e\x36\x0c\xd6\x96\x7e\x40\x7c\xa7\xf5\x95\x38\xa0\x47\xd2\ +\x79\x59\x81\x67\xf9\x7c\x20\xbb\x37\xb6\x99\x02\xf7\x5f\x65\xbe\ +\x69\x3e\x93\xff\x58\xda\xfc\xff\x4c\xe5\xcf\x10\x60\xe7\x71\xe5\ +\x20\xf8\x57\xf3\x40\x2f\x30\xd0\xdc\x74\x2c\xeb\x01\x1d\x92\x80\ +\xb4\x14\xa3\x6c\x50\xf4\xf1\xb6\xb1\x27\x60\x96\x90\xd3\x72\x88\ +\xf7\xe0\x16\x19\x5a\x74\x11\xbb\x4e\x32\xa7\x49\x6a\x0a\xf2\x5e\ +\x81\x18\xfc\x26\xb3\x27\x8f\x4e\xa6\x26\x4c\x5e\xd3\x78\x11\x48\ +\xc1\xeb\x29\x48\xa4\x49\x12\x5e\x04\xbc\xa4\x09\x14\xbc\xb7\x08\ +\xc4\x1b\xf6\x84\x21\x2d\xdb\x4d\x6e\x64\xef\x15\x48\x49\x48\x22\ +\x0e\xe0\x79\x5c\x45\xdc\xf6\xf1\xdf\x4b\x80\x77\xc4\x7a\xcd\x9b\ +\x80\x4c\x2f\x49\xa8\xa7\x0c\x4e\x24\xc0\xb7\xff\xd0\x6e\x08\xfc\ +\x59\x02\x8e\x95\x8d\xf1\xf1\x8f\x44\xc0\x39\x81\xd4\xdd\xb0\x59\ +\x81\x1f\xf1\x7a\xd4\x64\x23\x11\xe8\xc9\x2b\x48\x52\x16\xf1\xe7\ +\x3c\x2b\x03\x49\x7f\x5c\x5e\x15\x2d\x3f\x79\xc1\xa4\x12\x70\x4f\ +\xe0\xaf\x0a\x91\x9f\xd9\x97\xfd\xe2\x53\x06\x3e\x19\x2c\xcb\xeb\ +\x29\x70\xb8\x20\x5d\xa9\xcb\x8b\x80\x54\x4c\x07\xaf\x91\xd8\xe2\ +\x8d\xf1\x4c\xdc\x76\x04\xaf\xc6\x3e\x10\xf8\xe5\x3b\x46\x67\xe5\ +\x4c\xa0\x47\xe5\xf5\x0d\x69\x4a\x33\x2f\xf0\xb1\x0a\x90\x51\xc7\ +\x3a\x11\x90\xe0\xf3\x94\x37\x49\x54\x6e\x73\xf5\x96\x81\x43\x3e\ +\xf3\x94\x12\x08\x06\x3e\x41\x7d\x5f\x60\xfb\x09\x70\xca\x95\x44\ +\x21\x81\xd0\xe0\x15\xe8\x39\x81\x4f\xae\x25\x65\x25\x60\xdb\xb0\ +\xae\xca\xd4\x12\xc7\x92\x5a\x05\x26\x8a\xe6\xb2\x12\xe0\x01\x7d\ +\xbd\x68\x60\x28\xdf\x5d\x81\x1d\x82\xb4\xfe\x3e\x65\x25\x40\x80\ +\xe7\x43\x81\x2c\xcb\xc3\xbb\xc8\x1d\x5b\x4c\x86\x40\x1d\x6f\x46\ 
+\x5b\xa2\x8a\xfd\xe9\xc9\x64\x2a\x9e\x69\xb3\x6c\xce\x56\x4c\x14\ +\x62\xf8\x39\x3d\x89\x49\xe8\xa8\xe6\xdc\x08\x81\xc6\x31\xc7\x77\ +\x15\xc7\x72\xd2\x31\x5a\x2e\x62\xc3\xe8\x9f\x8e\x93\x87\x08\xfb\ +\x62\x4b\x62\xae\x80\x2d\xae\xb5\xfd\x63\x4f\xc0\xfc\x61\xeb\x9b\ +\x26\xf5\x85\xc5\x08\xaf\x5b\x22\xfd\x8c\xc2\xc1\x83\x66\x53\xc7\ +\x94\x21\xc0\x57\x5e\xff\x37\x70\x33\x09\x90\x47\xd7\xb5\x96\x10\ +\x48\x61\x70\x13\xf7\x75\x4c\x99\x12\x9a\x05\xde\xb1\xe3\x79\xdb\ +\x40\x6b\x78\xfa\x0b\xc0\x9a\x8e\x2f\xf7\x2b\xdd\x63\xe0\x12\x89\ +\xdc\xe6\x8b\xed\x64\x1b\xc8\x10\xe4\x67\xa9\xbc\x09\xbe\x0d\xd8\ +\x3a\x0c\x9d\x02\x9d\x02\x7b\x5d\x81\xff\xd4\x3b\x9a\x8c\xa1\x62\ +\xf3\x6f\x00\x00\x00\x00\x49\x45\x4e\x44\xae\x42\x60\x82\ +\x00\x00\x02\xa9\ +\x3c\ +\x3f\x78\x6d\x6c\x20\x76\x65\x72\x73\x69\x6f\x6e\x3d\x22\x31\x2e\ +\x30\x22\x20\x65\x6e\x63\x6f\x64\x69\x6e\x67\x3d\x22\x55\x54\x46\ +\x2d\x38\x22\x3f\x3e\x3c\x73\x76\x67\x20\x77\x69\x64\x74\x68\x3d\ +\x22\x33\x32\x22\x20\x68\x65\x69\x67\x68\x74\x3d\x22\x33\x32\x22\ +\x20\x76\x69\x65\x77\x42\x6f\x78\x3d\x22\x30\x20\x30\x20\x34\x38\ +\x20\x34\x38\x22\x20\x66\x69\x6c\x6c\x3d\x22\x6e\x6f\x6e\x65\x22\ +\x20\x78\x6d\x6c\x6e\x73\x3d\x22\x68\x74\x74\x70\x3a\x2f\x2f\x77\ +\x77\x77\x2e\x77\x33\x2e\x6f\x72\x67\x2f\x32\x30\x30\x30\x2f\x73\ +\x76\x67\x22\x3e\x3c\x70\x61\x74\x68\x20\x64\x3d\x22\x4d\x32\x31\ +\x20\x33\x38\x43\x33\x30\x2e\x33\x38\x38\x38\x20\x33\x38\x20\x33\ +\x38\x20\x33\x30\x2e\x33\x38\x38\x38\x20\x33\x38\x20\x32\x31\x43\ +\x33\x38\x20\x31\x31\x2e\x36\x31\x31\x32\x20\x33\x30\x2e\x33\x38\ +\x38\x38\x20\x34\x20\x32\x31\x20\x34\x43\x31\x31\x2e\x36\x31\x31\ +\x32\x20\x34\x20\x34\x20\x31\x31\x2e\x36\x31\x31\x32\x20\x34\x20\ +\x32\x31\x43\x34\x20\x33\x30\x2e\x33\x38\x38\x38\x20\x31\x31\x2e\ +\x36\x31\x31\x32\x20\x33\x38\x20\x32\x31\x20\x33\x38\x5a\x22\x20\ +\x66\x69\x6c\x6c\x3d\x22\x6e\x6f\x6e\x65\x22\x20\x73\x74\x72\x6f\ +\x6b\x65\x3d\x22\x23\x30\x30\x36\x34\x66\x66\x22\x20\x73\x74\x72\ +\x6f\x6b\x65\x2d\x77\x69\x64\x74\x68\x3d\x22\x33\x22\x20\x73\x74\ +\x72\x6f\x6b\x65\x2d\x6c\x69\x6e\x65\x6a\x6f\x69\x6e\x3d\x22\x6d\ +\x69\x74\x65\x72\x22\x2f\x3e\x3c\x70\x61\x74\x68\x20\x64\x3d\x22\ +\x4d\x32\x31\x20\x31\x35\x4c\x32\x31\x20\x32\x37\x22\x20\x73\x74\ +\x72\x6f\x6b\x65\x3d\x22\x23\x30\x30\x36\x34\x66\x66\x22\x20\x73\ +\x74\x72\x6f\x6b\x65\x2d\x77\x69\x64\x74\x68\x3d\x22\x33\x22\x20\ +\x73\x74\x72\x6f\x6b\x65\x2d\x6c\x69\x6e\x65\x63\x61\x70\x3d\x22\ +\x73\x71\x75\x61\x72\x65\x22\x20\x73\x74\x72\x6f\x6b\x65\x2d\x6c\ +\x69\x6e\x65\x6a\x6f\x69\x6e\x3d\x22\x6d\x69\x74\x65\x72\x22\x2f\ +\x3e\x3c\x70\x61\x74\x68\x20\x64\x3d\x22\x4d\x31\x35\x2e\x30\x31\ +\x35\x36\x20\x32\x31\x2e\x30\x31\x35\x36\x4c\x32\x37\x20\x32\x31\ +\x22\x20\x73\x74\x72\x6f\x6b\x65\x3d\x22\x23\x30\x30\x36\x34\x66\ +\x66\x22\x20\x73\x74\x72\x6f\x6b\x65\x2d\x77\x69\x64\x74\x68\x3d\ +\x22\x33\x22\x20\x73\x74\x72\x6f\x6b\x65\x2d\x6c\x69\x6e\x65\x63\ +\x61\x70\x3d\x22\x73\x71\x75\x61\x72\x65\x22\x20\x73\x74\x72\x6f\ +\x6b\x65\x2d\x6c\x69\x6e\x65\x6a\x6f\x69\x6e\x3d\x22\x6d\x69\x74\ +\x65\x72\x22\x2f\x3e\x3c\x70\x61\x74\x68\x20\x64\x3d\x22\x4d\x33\ +\x33\x2e\x32\x32\x31\x36\x20\x33\x33\x2e\x32\x32\x31\x37\x4c\x34\ +\x31\x2e\x37\x30\x36\x39\x20\x34\x31\x2e\x37\x30\x37\x22\x20\x73\ +\x74\x72\x6f\x6b\x65\x3d\x22\x23\x30\x30\x36\x34\x66\x66\x22\x20\ +\x73\x74\x72\x6f\x6b\x65\x2d\x77\x69\x64\x74\x68\x3d\x22\x33\x22\ +\x20\x73\x74\x72\x6f\x6b\x65\x2d\x6c\x69\x6e\x65\x63\x61\x70\x3d\ +\x22\x73\x71\x75\x61\x72\x65\x22\x20\x73\x74\x72\x6f\x6b\x65\x2d\ 
+\x6c\x69\x6e\x65\x6a\x6f\x69\x6e\x3d\x22\x6d\x69\x74\x65\x72\x22\ +\x2f\x3e\x3c\x2f\x73\x76\x67\x3e\ \x00\x00\x02\x95\ \x3c\ \x3f\x78\x6d\x6c\x20\x76\x65\x72\x73\x69\x6f\x6e\x3d\x22\x31\x2e\ @@ -1121,6 +1812,97 @@ qt_resource_data = b"\ \x22\x20\x73\x74\x72\x6f\x6b\x65\x2d\x6c\x69\x6e\x65\x6a\x6f\x69\ \x6e\x3d\x22\x6d\x69\x74\x65\x72\x22\x2f\x3e\x3c\x2f\x73\x76\x67\ \x3e\ +\x00\x00\x01\x33\ +\x3c\ +\x3f\x78\x6d\x6c\x20\x76\x65\x72\x73\x69\x6f\x6e\x3d\x22\x31\x2e\ +\x30\x22\x20\x65\x6e\x63\x6f\x64\x69\x6e\x67\x3d\x22\x55\x54\x46\ +\x2d\x38\x22\x3f\x3e\x3c\x73\x76\x67\x20\x77\x69\x64\x74\x68\x3d\ +\x22\x33\x32\x22\x20\x68\x65\x69\x67\x68\x74\x3d\x22\x33\x32\x22\ +\x20\x76\x69\x65\x77\x42\x6f\x78\x3d\x22\x30\x20\x30\x20\x34\x38\ +\x20\x34\x38\x22\x20\x66\x69\x6c\x6c\x3d\x22\x6e\x6f\x6e\x65\x22\ +\x20\x78\x6d\x6c\x6e\x73\x3d\x22\x68\x74\x74\x70\x3a\x2f\x2f\x77\ +\x77\x77\x2e\x77\x33\x2e\x6f\x72\x67\x2f\x32\x30\x30\x30\x2f\x73\ +\x76\x67\x22\x3e\x3c\x70\x61\x74\x68\x20\x64\x3d\x22\x4d\x34\x32\ +\x20\x36\x56\x34\x32\x4d\x31\x37\x20\x31\x39\x4c\x31\x32\x20\x32\ +\x34\x4d\x31\x32\x20\x32\x34\x4c\x31\x37\x20\x32\x39\x4d\x31\x32\ +\x20\x32\x34\x48\x33\x36\x4d\x33\x31\x20\x31\x39\x4c\x33\x36\x20\ +\x32\x34\x4d\x33\x36\x20\x32\x34\x4c\x33\x31\x20\x32\x39\x4d\x36\ +\x20\x36\x4c\x36\x20\x34\x32\x22\x20\x73\x74\x72\x6f\x6b\x65\x3d\ +\x22\x23\x30\x30\x36\x34\x66\x66\x22\x20\x73\x74\x72\x6f\x6b\x65\ +\x2d\x77\x69\x64\x74\x68\x3d\x22\x33\x22\x20\x73\x74\x72\x6f\x6b\ +\x65\x2d\x6c\x69\x6e\x65\x63\x61\x70\x3d\x22\x73\x71\x75\x61\x72\ +\x65\x22\x20\x73\x74\x72\x6f\x6b\x65\x2d\x6c\x69\x6e\x65\x6a\x6f\ +\x69\x6e\x3d\x22\x6d\x69\x74\x65\x72\x22\x2f\x3e\x3c\x2f\x73\x76\ +\x67\x3e\ +\x00\x00\x02\x2c\ +\x3c\ +\x3f\x78\x6d\x6c\x20\x76\x65\x72\x73\x69\x6f\x6e\x3d\x22\x31\x2e\ +\x30\x22\x20\x65\x6e\x63\x6f\x64\x69\x6e\x67\x3d\x22\x55\x54\x46\ +\x2d\x38\x22\x3f\x3e\x3c\x73\x76\x67\x20\x77\x69\x64\x74\x68\x3d\ +\x22\x33\x32\x22\x20\x68\x65\x69\x67\x68\x74\x3d\x22\x33\x32\x22\ +\x20\x76\x69\x65\x77\x42\x6f\x78\x3d\x22\x30\x20\x30\x20\x34\x38\ +\x20\x34\x38\x22\x20\x66\x69\x6c\x6c\x3d\x22\x6e\x6f\x6e\x65\x22\ +\x20\x78\x6d\x6c\x6e\x73\x3d\x22\x68\x74\x74\x70\x3a\x2f\x2f\x77\ +\x77\x77\x2e\x77\x33\x2e\x6f\x72\x67\x2f\x32\x30\x30\x30\x2f\x73\ +\x76\x67\x22\x3e\x3c\x70\x61\x74\x68\x20\x64\x3d\x22\x4d\x31\x39\ +\x20\x36\x48\x39\x43\x37\x2e\x33\x34\x33\x31\x35\x20\x36\x20\x36\ +\x20\x37\x2e\x33\x34\x33\x31\x35\x20\x36\x20\x39\x56\x33\x39\x43\ +\x36\x20\x34\x30\x2e\x36\x35\x36\x39\x20\x37\x2e\x33\x34\x33\x31\ +\x35\x20\x34\x32\x20\x39\x20\x34\x32\x48\x33\x39\x43\x34\x30\x2e\ +\x36\x35\x36\x39\x20\x34\x32\x20\x34\x32\x20\x34\x30\x2e\x36\x35\ +\x36\x39\x20\x34\x32\x20\x33\x39\x56\x32\x39\x22\x20\x73\x74\x72\ +\x6f\x6b\x65\x3d\x22\x23\x30\x30\x36\x34\x66\x66\x22\x20\x73\x74\ +\x72\x6f\x6b\x65\x2d\x77\x69\x64\x74\x68\x3d\x22\x33\x22\x20\x73\ +\x74\x72\x6f\x6b\x65\x2d\x6c\x69\x6e\x65\x63\x61\x70\x3d\x22\x73\ +\x71\x75\x61\x72\x65\x22\x20\x73\x74\x72\x6f\x6b\x65\x2d\x6c\x69\ +\x6e\x65\x6a\x6f\x69\x6e\x3d\x22\x6d\x69\x74\x65\x72\x22\x2f\x3e\ +\x3c\x70\x61\x74\x68\x20\x64\x3d\x22\x4d\x31\x38\x20\x31\x37\x4c\ +\x32\x32\x20\x32\x39\x4c\x33\x34\x20\x32\x35\x22\x20\x73\x74\x72\ +\x6f\x6b\x65\x3d\x22\x23\x30\x30\x36\x34\x66\x66\x22\x20\x73\x74\ +\x72\x6f\x6b\x65\x2d\x77\x69\x64\x74\x68\x3d\x22\x33\x22\x20\x73\ +\x74\x72\x6f\x6b\x65\x2d\x6c\x69\x6e\x65\x63\x61\x70\x3d\x22\x73\ +\x71\x75\x61\x72\x65\x22\x20\x73\x74\x72\x6f\x6b\x65\x2d\x6c\x69\ +\x6e\x65\x6a\x6f\x69\x6e\x3d\x22\x6d\x69\x74\x65\x72\x22\x2f\x3e\ 
+\x3c\x70\x61\x74\x68\x20\x64\x3d\x22\x4d\x32\x32\x20\x32\x39\x43\ +\x32\x37\x20\x31\x34\x20\x33\x30\x20\x31\x31\x20\x34\x30\x20\x37\ +\x22\x20\x73\x74\x72\x6f\x6b\x65\x3d\x22\x23\x30\x30\x36\x34\x66\ +\x66\x22\x20\x73\x74\x72\x6f\x6b\x65\x2d\x77\x69\x64\x74\x68\x3d\ +\x22\x33\x22\x20\x73\x74\x72\x6f\x6b\x65\x2d\x6c\x69\x6e\x65\x63\ +\x61\x70\x3d\x22\x73\x71\x75\x61\x72\x65\x22\x20\x73\x74\x72\x6f\ +\x6b\x65\x2d\x6c\x69\x6e\x65\x6a\x6f\x69\x6e\x3d\x22\x6d\x69\x74\ +\x65\x72\x22\x2f\x3e\x3c\x2f\x73\x76\x67\x3e\ +\x00\x00\x01\xd7\ +\x3c\ +\x3f\x78\x6d\x6c\x20\x76\x65\x72\x73\x69\x6f\x6e\x3d\x22\x31\x2e\ +\x30\x22\x20\x65\x6e\x63\x6f\x64\x69\x6e\x67\x3d\x22\x55\x54\x46\ +\x2d\x38\x22\x3f\x3e\x3c\x73\x76\x67\x20\x77\x69\x64\x74\x68\x3d\ +\x22\x33\x32\x22\x20\x68\x65\x69\x67\x68\x74\x3d\x22\x33\x32\x22\ +\x20\x76\x69\x65\x77\x42\x6f\x78\x3d\x22\x30\x20\x30\x20\x34\x38\ +\x20\x34\x38\x22\x20\x66\x69\x6c\x6c\x3d\x22\x6e\x6f\x6e\x65\x22\ +\x20\x78\x6d\x6c\x6e\x73\x3d\x22\x68\x74\x74\x70\x3a\x2f\x2f\x77\ +\x77\x77\x2e\x77\x33\x2e\x6f\x72\x67\x2f\x32\x30\x30\x30\x2f\x73\ +\x76\x67\x22\x3e\x3c\x70\x61\x74\x68\x20\x64\x3d\x22\x4d\x32\x34\ +\x2e\x30\x30\x38\x33\x20\x31\x34\x2e\x31\x30\x30\x36\x56\x34\x32\ +\x2e\x30\x30\x30\x31\x22\x20\x73\x74\x72\x6f\x6b\x65\x3d\x22\x23\ +\x30\x30\x36\x34\x66\x66\x22\x20\x73\x74\x72\x6f\x6b\x65\x2d\x77\ +\x69\x64\x74\x68\x3d\x22\x33\x22\x20\x73\x74\x72\x6f\x6b\x65\x2d\ +\x6c\x69\x6e\x65\x63\x61\x70\x3d\x22\x73\x71\x75\x61\x72\x65\x22\ +\x20\x73\x74\x72\x6f\x6b\x65\x2d\x6c\x69\x6e\x65\x6a\x6f\x69\x6e\ +\x3d\x22\x6d\x69\x74\x65\x72\x22\x2f\x3e\x3c\x70\x61\x74\x68\x20\ +\x64\x3d\x22\x4d\x31\x32\x20\x32\x36\x4c\x32\x34\x20\x31\x34\x4c\ +\x33\x36\x20\x32\x36\x22\x20\x73\x74\x72\x6f\x6b\x65\x3d\x22\x23\ +\x30\x30\x36\x34\x66\x66\x22\x20\x73\x74\x72\x6f\x6b\x65\x2d\x77\ +\x69\x64\x74\x68\x3d\x22\x33\x22\x20\x73\x74\x72\x6f\x6b\x65\x2d\ +\x6c\x69\x6e\x65\x63\x61\x70\x3d\x22\x73\x71\x75\x61\x72\x65\x22\ +\x20\x73\x74\x72\x6f\x6b\x65\x2d\x6c\x69\x6e\x65\x6a\x6f\x69\x6e\ +\x3d\x22\x6d\x69\x74\x65\x72\x22\x2f\x3e\x3c\x70\x61\x74\x68\x20\ +\x64\x3d\x22\x4d\x31\x32\x20\x36\x48\x33\x36\x22\x20\x73\x74\x72\ +\x6f\x6b\x65\x3d\x22\x23\x30\x30\x36\x34\x66\x66\x22\x20\x73\x74\ +\x72\x6f\x6b\x65\x2d\x77\x69\x64\x74\x68\x3d\x22\x33\x22\x20\x73\ +\x74\x72\x6f\x6b\x65\x2d\x6c\x69\x6e\x65\x63\x61\x70\x3d\x22\x73\ +\x71\x75\x61\x72\x65\x22\x20\x73\x74\x72\x6f\x6b\x65\x2d\x6c\x69\ +\x6e\x65\x6a\x6f\x69\x6e\x3d\x22\x6d\x69\x74\x65\x72\x22\x2f\x3e\ +\x3c\x2f\x73\x76\x67\x3e\ \x00\x00\x02\xbf\ \x3c\ \x3f\x78\x6d\x6c\x20\x76\x65\x72\x73\x69\x6f\x6e\x3d\x22\x31\x2e\ @@ -1167,732 +1949,6 @@ qt_resource_data = b"\ \x72\x6f\x6b\x65\x2d\x77\x69\x64\x74\x68\x3d\x22\x33\x22\x20\x73\ \x74\x72\x6f\x6b\x65\x2d\x6c\x69\x6e\x65\x63\x61\x70\x3d\x22\x73\ \x71\x75\x61\x72\x65\x22\x2f\x3e\x3c\x2f\x73\x76\x67\x3e\ -\x00\x00\x03\x3e\ -\x3c\ -\x3f\x78\x6d\x6c\x20\x76\x65\x72\x73\x69\x6f\x6e\x3d\x22\x31\x2e\ -\x30\x22\x20\x65\x6e\x63\x6f\x64\x69\x6e\x67\x3d\x22\x55\x54\x46\ -\x2d\x38\x22\x3f\x3e\x3c\x73\x76\x67\x20\x77\x69\x64\x74\x68\x3d\ -\x22\x33\x32\x22\x20\x68\x65\x69\x67\x68\x74\x3d\x22\x33\x32\x22\ -\x20\x76\x69\x65\x77\x42\x6f\x78\x3d\x22\x30\x20\x30\x20\x34\x38\ -\x20\x34\x38\x22\x20\x66\x69\x6c\x6c\x3d\x22\x6e\x6f\x6e\x65\x22\ -\x20\x78\x6d\x6c\x6e\x73\x3d\x22\x68\x74\x74\x70\x3a\x2f\x2f\x77\ -\x77\x77\x2e\x77\x33\x2e\x6f\x72\x67\x2f\x32\x30\x30\x30\x2f\x73\ -\x76\x67\x22\x3e\x3c\x70\x61\x74\x68\x20\x64\x3d\x22\x4d\x34\x30\ -\x20\x32\x33\x56\x31\x34\x4c\x33\x31\x20\x34\x48\x31\x30\x43\x38\ 
-\x2e\x38\x39\x35\x34\x33\x20\x34\x20\x38\x20\x34\x2e\x38\x39\x35\ -\x34\x33\x20\x38\x20\x36\x56\x34\x32\x43\x38\x20\x34\x33\x2e\x31\ -\x30\x34\x36\x20\x38\x2e\x38\x39\x35\x34\x33\x20\x34\x34\x20\x31\ -\x30\x20\x34\x34\x48\x32\x32\x22\x20\x73\x74\x72\x6f\x6b\x65\x3d\ -\x22\x23\x30\x30\x36\x34\x66\x66\x22\x20\x73\x74\x72\x6f\x6b\x65\ -\x2d\x77\x69\x64\x74\x68\x3d\x22\x33\x22\x20\x73\x74\x72\x6f\x6b\ -\x65\x2d\x6c\x69\x6e\x65\x63\x61\x70\x3d\x22\x73\x71\x75\x61\x72\ -\x65\x22\x20\x73\x74\x72\x6f\x6b\x65\x2d\x6c\x69\x6e\x65\x6a\x6f\ -\x69\x6e\x3d\x22\x6d\x69\x74\x65\x72\x22\x2f\x3e\x3c\x70\x61\x74\ -\x68\x20\x64\x3d\x22\x4d\x32\x37\x20\x33\x33\x48\x34\x31\x22\x20\ -\x73\x74\x72\x6f\x6b\x65\x3d\x22\x23\x30\x30\x36\x34\x66\x66\x22\ -\x20\x73\x74\x72\x6f\x6b\x65\x2d\x77\x69\x64\x74\x68\x3d\x22\x33\ -\x22\x20\x73\x74\x72\x6f\x6b\x65\x2d\x6c\x69\x6e\x65\x63\x61\x70\ -\x3d\x22\x73\x71\x75\x61\x72\x65\x22\x20\x73\x74\x72\x6f\x6b\x65\ -\x2d\x6c\x69\x6e\x65\x6a\x6f\x69\x6e\x3d\x22\x6d\x69\x74\x65\x72\ -\x22\x2f\x3e\x3c\x70\x61\x74\x68\x20\x64\x3d\x22\x4d\x32\x37\x20\ -\x33\x39\x48\x34\x31\x22\x20\x73\x74\x72\x6f\x6b\x65\x3d\x22\x23\ -\x30\x30\x36\x34\x66\x66\x22\x20\x73\x74\x72\x6f\x6b\x65\x2d\x77\ -\x69\x64\x74\x68\x3d\x22\x33\x22\x20\x73\x74\x72\x6f\x6b\x65\x2d\ -\x6c\x69\x6e\x65\x63\x61\x70\x3d\x22\x73\x71\x75\x61\x72\x65\x22\ -\x20\x73\x74\x72\x6f\x6b\x65\x2d\x6c\x69\x6e\x65\x6a\x6f\x69\x6e\ -\x3d\x22\x6d\x69\x74\x65\x72\x22\x2f\x3e\x3c\x70\x61\x74\x68\x20\ -\x64\x3d\x22\x4d\x34\x31\x20\x33\x33\x4c\x33\x36\x20\x32\x38\x22\ -\x20\x73\x74\x72\x6f\x6b\x65\x3d\x22\x23\x30\x30\x36\x34\x66\x66\ -\x22\x20\x73\x74\x72\x6f\x6b\x65\x2d\x77\x69\x64\x74\x68\x3d\x22\ -\x33\x22\x20\x73\x74\x72\x6f\x6b\x65\x2d\x6c\x69\x6e\x65\x63\x61\ -\x70\x3d\x22\x73\x71\x75\x61\x72\x65\x22\x20\x73\x74\x72\x6f\x6b\ -\x65\x2d\x6c\x69\x6e\x65\x6a\x6f\x69\x6e\x3d\x22\x6d\x69\x74\x65\ -\x72\x22\x2f\x3e\x3c\x70\x61\x74\x68\x20\x64\x3d\x22\x4d\x33\x32\ -\x20\x34\x34\x4c\x32\x37\x20\x33\x39\x22\x20\x73\x74\x72\x6f\x6b\ -\x65\x3d\x22\x23\x30\x30\x36\x34\x66\x66\x22\x20\x73\x74\x72\x6f\ -\x6b\x65\x2d\x77\x69\x64\x74\x68\x3d\x22\x33\x22\x20\x73\x74\x72\ -\x6f\x6b\x65\x2d\x6c\x69\x6e\x65\x63\x61\x70\x3d\x22\x73\x71\x75\ -\x61\x72\x65\x22\x20\x73\x74\x72\x6f\x6b\x65\x2d\x6c\x69\x6e\x65\ -\x6a\x6f\x69\x6e\x3d\x22\x6d\x69\x74\x65\x72\x22\x2f\x3e\x3c\x70\ -\x61\x74\x68\x20\x64\x3d\x22\x4d\x33\x30\x20\x34\x56\x31\x34\x48\ -\x34\x30\x22\x20\x73\x74\x72\x6f\x6b\x65\x3d\x22\x23\x30\x30\x36\ -\x34\x66\x66\x22\x20\x73\x74\x72\x6f\x6b\x65\x2d\x77\x69\x64\x74\ -\x68\x3d\x22\x33\x22\x20\x73\x74\x72\x6f\x6b\x65\x2d\x6c\x69\x6e\ -\x65\x63\x61\x70\x3d\x22\x73\x71\x75\x61\x72\x65\x22\x20\x73\x74\ -\x72\x6f\x6b\x65\x2d\x6c\x69\x6e\x65\x6a\x6f\x69\x6e\x3d\x22\x6d\ -\x69\x74\x65\x72\x22\x2f\x3e\x3c\x2f\x73\x76\x67\x3e\ -\x00\x00\x02\x35\ -\x3c\ -\x3f\x78\x6d\x6c\x20\x76\x65\x72\x73\x69\x6f\x6e\x3d\x22\x31\x2e\ -\x30\x22\x20\x65\x6e\x63\x6f\x64\x69\x6e\x67\x3d\x22\x55\x54\x46\ -\x2d\x38\x22\x3f\x3e\x3c\x73\x76\x67\x20\x77\x69\x64\x74\x68\x3d\ -\x22\x33\x32\x22\x20\x68\x65\x69\x67\x68\x74\x3d\x22\x33\x32\x22\ -\x20\x76\x69\x65\x77\x42\x6f\x78\x3d\x22\x30\x20\x30\x20\x34\x38\ -\x20\x34\x38\x22\x20\x66\x69\x6c\x6c\x3d\x22\x6e\x6f\x6e\x65\x22\ -\x20\x78\x6d\x6c\x6e\x73\x3d\x22\x68\x74\x74\x70\x3a\x2f\x2f\x77\ -\x77\x77\x2e\x77\x33\x2e\x6f\x72\x67\x2f\x32\x30\x30\x30\x2f\x73\ -\x76\x67\x22\x3e\x3c\x70\x61\x74\x68\x20\x64\x3d\x22\x4d\x32\x31\ -\x20\x33\x38\x43\x33\x30\x2e\x33\x38\x38\x38\x20\x33\x38\x20\x33\ 
-\x38\x20\x33\x30\x2e\x33\x38\x38\x38\x20\x33\x38\x20\x32\x31\x43\ -\x33\x38\x20\x31\x31\x2e\x36\x31\x31\x32\x20\x33\x30\x2e\x33\x38\ -\x38\x38\x20\x34\x20\x32\x31\x20\x34\x43\x31\x31\x2e\x36\x31\x31\ -\x32\x20\x34\x20\x34\x20\x31\x31\x2e\x36\x31\x31\x32\x20\x34\x20\ -\x32\x31\x43\x34\x20\x33\x30\x2e\x33\x38\x38\x38\x20\x31\x31\x2e\ -\x36\x31\x31\x32\x20\x33\x38\x20\x32\x31\x20\x33\x38\x5a\x22\x20\ -\x66\x69\x6c\x6c\x3d\x22\x6e\x6f\x6e\x65\x22\x20\x73\x74\x72\x6f\ -\x6b\x65\x3d\x22\x23\x30\x30\x36\x34\x66\x66\x22\x20\x73\x74\x72\ -\x6f\x6b\x65\x2d\x77\x69\x64\x74\x68\x3d\x22\x33\x22\x20\x73\x74\ -\x72\x6f\x6b\x65\x2d\x6c\x69\x6e\x65\x6a\x6f\x69\x6e\x3d\x22\x6d\ -\x69\x74\x65\x72\x22\x2f\x3e\x3c\x70\x61\x74\x68\x20\x64\x3d\x22\ -\x4d\x31\x35\x20\x32\x31\x4c\x32\x37\x20\x32\x31\x22\x20\x73\x74\ -\x72\x6f\x6b\x65\x3d\x22\x23\x30\x30\x36\x34\x66\x66\x22\x20\x73\ -\x74\x72\x6f\x6b\x65\x2d\x77\x69\x64\x74\x68\x3d\x22\x33\x22\x20\ -\x73\x74\x72\x6f\x6b\x65\x2d\x6c\x69\x6e\x65\x63\x61\x70\x3d\x22\ -\x73\x71\x75\x61\x72\x65\x22\x20\x73\x74\x72\x6f\x6b\x65\x2d\x6c\ -\x69\x6e\x65\x6a\x6f\x69\x6e\x3d\x22\x6d\x69\x74\x65\x72\x22\x2f\ -\x3e\x3c\x70\x61\x74\x68\x20\x64\x3d\x22\x4d\x33\x33\x2e\x32\x32\ -\x31\x36\x20\x33\x33\x2e\x32\x32\x31\x37\x4c\x34\x31\x2e\x37\x30\ -\x36\x39\x20\x34\x31\x2e\x37\x30\x37\x22\x20\x73\x74\x72\x6f\x6b\ -\x65\x3d\x22\x23\x30\x30\x36\x34\x66\x66\x22\x20\x73\x74\x72\x6f\ -\x6b\x65\x2d\x77\x69\x64\x74\x68\x3d\x22\x33\x22\x20\x73\x74\x72\ -\x6f\x6b\x65\x2d\x6c\x69\x6e\x65\x63\x61\x70\x3d\x22\x73\x71\x75\ -\x61\x72\x65\x22\x20\x73\x74\x72\x6f\x6b\x65\x2d\x6c\x69\x6e\x65\ -\x6a\x6f\x69\x6e\x3d\x22\x6d\x69\x74\x65\x72\x22\x2f\x3e\x3c\x2f\ -\x73\x76\x67\x3e\ -\x00\x00\x02\x2c\ -\x3c\ -\x3f\x78\x6d\x6c\x20\x76\x65\x72\x73\x69\x6f\x6e\x3d\x22\x31\x2e\ -\x30\x22\x20\x65\x6e\x63\x6f\x64\x69\x6e\x67\x3d\x22\x55\x54\x46\ -\x2d\x38\x22\x3f\x3e\x3c\x73\x76\x67\x20\x77\x69\x64\x74\x68\x3d\ -\x22\x33\x32\x22\x20\x68\x65\x69\x67\x68\x74\x3d\x22\x33\x32\x22\ -\x20\x76\x69\x65\x77\x42\x6f\x78\x3d\x22\x30\x20\x30\x20\x34\x38\ -\x20\x34\x38\x22\x20\x66\x69\x6c\x6c\x3d\x22\x6e\x6f\x6e\x65\x22\ -\x20\x78\x6d\x6c\x6e\x73\x3d\x22\x68\x74\x74\x70\x3a\x2f\x2f\x77\ -\x77\x77\x2e\x77\x33\x2e\x6f\x72\x67\x2f\x32\x30\x30\x30\x2f\x73\ -\x76\x67\x22\x3e\x3c\x70\x61\x74\x68\x20\x64\x3d\x22\x4d\x31\x39\ -\x20\x36\x48\x39\x43\x37\x2e\x33\x34\x33\x31\x35\x20\x36\x20\x36\ -\x20\x37\x2e\x33\x34\x33\x31\x35\x20\x36\x20\x39\x56\x33\x39\x43\ -\x36\x20\x34\x30\x2e\x36\x35\x36\x39\x20\x37\x2e\x33\x34\x33\x31\ -\x35\x20\x34\x32\x20\x39\x20\x34\x32\x48\x33\x39\x43\x34\x30\x2e\ -\x36\x35\x36\x39\x20\x34\x32\x20\x34\x32\x20\x34\x30\x2e\x36\x35\ -\x36\x39\x20\x34\x32\x20\x33\x39\x56\x32\x39\x22\x20\x73\x74\x72\ -\x6f\x6b\x65\x3d\x22\x23\x30\x30\x36\x34\x66\x66\x22\x20\x73\x74\ -\x72\x6f\x6b\x65\x2d\x77\x69\x64\x74\x68\x3d\x22\x33\x22\x20\x73\ -\x74\x72\x6f\x6b\x65\x2d\x6c\x69\x6e\x65\x63\x61\x70\x3d\x22\x73\ -\x71\x75\x61\x72\x65\x22\x20\x73\x74\x72\x6f\x6b\x65\x2d\x6c\x69\ -\x6e\x65\x6a\x6f\x69\x6e\x3d\x22\x6d\x69\x74\x65\x72\x22\x2f\x3e\ -\x3c\x70\x61\x74\x68\x20\x64\x3d\x22\x4d\x31\x38\x20\x31\x37\x4c\ -\x32\x32\x20\x32\x39\x4c\x33\x34\x20\x32\x35\x22\x20\x73\x74\x72\ -\x6f\x6b\x65\x3d\x22\x23\x30\x30\x36\x34\x66\x66\x22\x20\x73\x74\ -\x72\x6f\x6b\x65\x2d\x77\x69\x64\x74\x68\x3d\x22\x33\x22\x20\x73\ -\x74\x72\x6f\x6b\x65\x2d\x6c\x69\x6e\x65\x63\x61\x70\x3d\x22\x73\ -\x71\x75\x61\x72\x65\x22\x20\x73\x74\x72\x6f\x6b\x65\x2d\x6c\x69\ -\x6e\x65\x6a\x6f\x69\x6e\x3d\x22\x6d\x69\x74\x65\x72\x22\x2f\x3e\ 
-\x3c\x70\x61\x74\x68\x20\x64\x3d\x22\x4d\x32\x32\x20\x32\x39\x43\ -\x32\x37\x20\x31\x34\x20\x33\x30\x20\x31\x31\x20\x34\x30\x20\x37\ -\x22\x20\x73\x74\x72\x6f\x6b\x65\x3d\x22\x23\x30\x30\x36\x34\x66\ -\x66\x22\x20\x73\x74\x72\x6f\x6b\x65\x2d\x77\x69\x64\x74\x68\x3d\ -\x22\x33\x22\x20\x73\x74\x72\x6f\x6b\x65\x2d\x6c\x69\x6e\x65\x63\ -\x61\x70\x3d\x22\x73\x71\x75\x61\x72\x65\x22\x20\x73\x74\x72\x6f\ -\x6b\x65\x2d\x6c\x69\x6e\x65\x6a\x6f\x69\x6e\x3d\x22\x6d\x69\x74\ -\x65\x72\x22\x2f\x3e\x3c\x2f\x73\x76\x67\x3e\ -\x00\x00\x02\x2b\ -\x3c\ -\x3f\x78\x6d\x6c\x20\x76\x65\x72\x73\x69\x6f\x6e\x3d\x22\x31\x2e\ -\x30\x22\x20\x65\x6e\x63\x6f\x64\x69\x6e\x67\x3d\x22\x55\x54\x46\ -\x2d\x38\x22\x3f\x3e\x3c\x73\x76\x67\x20\x77\x69\x64\x74\x68\x3d\ -\x22\x33\x32\x22\x20\x68\x65\x69\x67\x68\x74\x3d\x22\x33\x32\x22\ -\x20\x76\x69\x65\x77\x42\x6f\x78\x3d\x22\x30\x20\x30\x20\x34\x38\ -\x20\x34\x38\x22\x20\x66\x69\x6c\x6c\x3d\x22\x6e\x6f\x6e\x65\x22\ -\x20\x78\x6d\x6c\x6e\x73\x3d\x22\x68\x74\x74\x70\x3a\x2f\x2f\x77\ -\x77\x77\x2e\x77\x33\x2e\x6f\x72\x67\x2f\x32\x30\x30\x30\x2f\x73\ -\x76\x67\x22\x3e\x3c\x70\x61\x74\x68\x20\x64\x3d\x22\x4d\x31\x39\ -\x20\x36\x48\x39\x43\x37\x2e\x33\x34\x33\x31\x35\x20\x36\x20\x36\ -\x20\x37\x2e\x33\x34\x33\x31\x35\x20\x36\x20\x39\x56\x33\x39\x43\ -\x36\x20\x34\x30\x2e\x36\x35\x36\x39\x20\x37\x2e\x33\x34\x33\x31\ -\x35\x20\x34\x32\x20\x39\x20\x34\x32\x48\x33\x39\x43\x34\x30\x2e\ -\x36\x35\x36\x39\x20\x34\x32\x20\x34\x32\x20\x34\x30\x2e\x36\x35\ -\x36\x39\x20\x34\x32\x20\x33\x39\x56\x32\x39\x22\x20\x73\x74\x72\ -\x6f\x6b\x65\x3d\x22\x23\x30\x30\x36\x34\x66\x66\x22\x20\x73\x74\ -\x72\x6f\x6b\x65\x2d\x77\x69\x64\x74\x68\x3d\x22\x33\x22\x20\x73\ -\x74\x72\x6f\x6b\x65\x2d\x6c\x69\x6e\x65\x63\x61\x70\x3d\x22\x73\ -\x71\x75\x61\x72\x65\x22\x20\x73\x74\x72\x6f\x6b\x65\x2d\x6c\x69\ -\x6e\x65\x6a\x6f\x69\x6e\x3d\x22\x6d\x69\x74\x65\x72\x22\x2f\x3e\ -\x3c\x70\x61\x74\x68\x20\x64\x3d\x22\x4d\x34\x32\x20\x31\x39\x4c\ -\x33\x38\x20\x37\x4c\x32\x36\x20\x31\x31\x22\x20\x73\x74\x72\x6f\ -\x6b\x65\x3d\x22\x23\x30\x30\x36\x34\x66\x66\x22\x20\x73\x74\x72\ -\x6f\x6b\x65\x2d\x77\x69\x64\x74\x68\x3d\x22\x33\x22\x20\x73\x74\ -\x72\x6f\x6b\x65\x2d\x6c\x69\x6e\x65\x63\x61\x70\x3d\x22\x73\x71\ -\x75\x61\x72\x65\x22\x20\x73\x74\x72\x6f\x6b\x65\x2d\x6c\x69\x6e\ -\x65\x6a\x6f\x69\x6e\x3d\x22\x6d\x69\x74\x65\x72\x22\x2f\x3e\x3c\ -\x70\x61\x74\x68\x20\x64\x3d\x22\x4d\x33\x38\x20\x37\x43\x33\x33\ -\x20\x32\x32\x20\x33\x30\x20\x32\x35\x20\x32\x30\x20\x32\x39\x22\ -\x20\x73\x74\x72\x6f\x6b\x65\x3d\x22\x23\x30\x30\x36\x34\x66\x66\ -\x22\x20\x73\x74\x72\x6f\x6b\x65\x2d\x77\x69\x64\x74\x68\x3d\x22\ -\x33\x22\x20\x73\x74\x72\x6f\x6b\x65\x2d\x6c\x69\x6e\x65\x63\x61\ -\x70\x3d\x22\x73\x71\x75\x61\x72\x65\x22\x20\x73\x74\x72\x6f\x6b\ -\x65\x2d\x6c\x69\x6e\x65\x6a\x6f\x69\x6e\x3d\x22\x6d\x69\x74\x65\ -\x72\x22\x2f\x3e\x3c\x2f\x73\x76\x67\x3e\ -\x00\x00\x03\x57\ -\x89\ -\x50\x4e\x47\x0d\x0a\x1a\x0a\x00\x00\x00\x0d\x49\x48\x44\x52\x00\ -\x00\x00\x30\x00\x00\x00\x30\x08\x06\x00\x00\x00\x57\x02\xf9\x87\ -\x00\x00\x00\x06\x62\x4b\x47\x44\x00\xff\x00\xff\x00\xff\xa0\xbd\ -\xa7\x93\x00\x00\x00\x09\x70\x48\x59\x73\x00\x00\x0d\xd7\x00\x00\ -\x0d\xd7\x01\x42\x28\x9b\x78\x00\x00\x00\x07\x74\x49\x4d\x45\x07\ -\xe7\x04\x13\x03\x14\x20\x67\x3f\x96\xef\x00\x00\x02\xe4\x49\x44\ -\x41\x54\x68\x43\xed\x58\x3b\x8b\x15\x31\x18\x3d\xd7\x07\xae\x95\ -\x62\x25\xb6\xa2\x8d\x0f\x2c\xc4\x46\x10\x41\x41\xf4\x07\x58\x59\ -\x89\x28\x58\x5a\x2c\xba\x6b\x71\x41\x57\xc4\xc2\xce\x42\xb9\x68\ 
-\x6f\x69\x23\x88\x8d\x62\x6f\x21\x82\x22\x5a\xa8\x20\x56\x6b\x25\ -\x17\x5f\xf1\x7c\x3b\xe6\x92\xc9\x24\x33\xc9\x24\xb3\xc3\xe2\x64\ -\x77\xd8\x9b\xef\x79\xce\x97\x2f\x99\xec\x05\x86\x31\x54\x60\xa8\ -\xc0\x50\x81\x3e\x2b\x30\x72\x25\xbf\x06\xec\x5c\x0f\x1c\x57\xc0\ -\x56\x97\xbe\x07\xd9\xf2\x3a\xe0\xe9\x15\xe0\x83\x9d\xbb\x42\x60\ -\x09\x38\x4b\xa3\x3b\x7c\xe6\x6c\xe3\x9e\xe7\x53\x82\xbd\xb8\x00\ -\x3c\x30\x71\x94\x08\x48\xe5\xc9\xf4\x35\x0d\x36\xf5\x0c\xd6\x97\ -\x7e\x4a\x7c\x7b\xcc\x95\xd8\x60\x5a\x52\x79\x4c\x83\x67\xfb\xbc\ -\x24\xbb\x27\xbe\x48\xab\x2c\x3f\xc1\x7c\x07\xf8\xcc\xfd\x61\x6b\ -\xf3\xef\x3d\x9d\xbf\x44\x80\xc2\x1d\x5a\x41\xf0\x8f\x16\x81\xf1\ -\x2a\x03\x75\xa6\x63\x5b\x4f\xa9\x10\x02\x32\x66\x18\x65\xc2\xa2\ -\xaf\xed\xb1\xe6\x09\xd8\x2d\x14\xb4\x1c\xdc\x1f\xfc\x2d\x06\x5b\ -\xad\x72\x92\x05\x05\xc9\x64\x14\xbd\x02\x26\x78\xc1\x60\xcf\x33\ -\xe1\x0a\x0e\x13\x45\xc0\x07\xd6\x27\x0f\x46\x91\x60\x18\x45\x20\ -\x21\x4f\x67\xae\xff\x17\x01\xdf\x86\xf5\xc9\x3b\x2b\xbb\x11\x38\ -\x7a\x05\x6c\xb0\xf6\xdc\x06\xcd\xfd\x41\x13\x25\xdb\x44\x3d\x04\ -\x78\x47\xcc\x3b\xa2\x09\x48\x7a\x01\xad\x9f\x26\x38\x23\x28\xbe\ -\xfd\x8b\x71\x1a\xea\xd7\x2d\x60\x7b\x93\x4f\x8c\xbe\x15\x81\xf0\ -\x04\x52\xf9\xf2\x98\x87\xfa\x72\x03\x38\x6c\xcb\xdb\xce\x5b\x11\ -\x18\xcb\x15\x44\x15\x3f\x2b\x9f\x9d\xa3\x0a\x5e\x9b\x2d\x40\xbd\ -\x90\x96\x2a\xda\xcb\xe9\x1c\x2c\xf4\x24\xf7\xfb\x0b\xe0\xb1\xc2\ -\x6f\x6d\x21\x9f\x29\xb3\xfe\xf1\xf1\x83\x37\x23\x4b\x7b\xdd\x04\ -\xb6\xf8\xb3\x35\x6b\xa2\x08\x48\xc5\x4c\xf0\x06\x89\x65\xde\x18\ -\xf7\x16\xf3\x30\xf0\xda\xf7\x32\xd4\xb7\x62\x93\x37\x83\x75\x59\ -\x04\x13\x18\xb3\xf2\x23\x85\xd9\x86\xb4\x83\x2d\x2a\xbc\x4a\x01\ -\xd2\xd6\x37\x88\x80\x80\x77\x55\xde\x26\x01\x95\x7a\xaf\x53\xea\ -\x36\xb0\xb9\x12\xb7\x46\xd0\x48\x20\x18\xbc\x4e\x92\x48\xe2\x12\ -\xd4\x77\x9e\x52\xbb\x6b\x30\x97\x54\xb5\x04\xa2\xc1\x67\x22\xc1\ -\x53\xea\x6d\x68\x4b\x79\x09\xf8\x36\x6c\x68\x65\xd2\xdb\x49\x32\ -\x29\x75\x17\xd8\x58\x97\xd3\x4b\x80\xdd\x7c\xaa\xce\x31\x48\x97\ -\xd8\x4e\x92\xe3\x02\xd4\x0f\x82\xf4\x7e\x3f\xe5\x25\x40\xdf\x83\ -\x41\x20\x9b\x8c\x32\x90\xe0\xd1\x77\xde\x97\xa6\x44\x20\xc7\x9b\ -\xd1\x99\x28\x9d\xc4\xec\x64\xb2\x2b\x5e\x9a\xb3\x6d\xf6\x39\x01\ -\xe4\x10\xa6\x93\xd0\x28\xf6\x9b\x70\x6c\x42\xdb\x0c\xe5\x9b\x1c\ -\xb8\x4b\x31\xda\x93\xf8\xac\xe3\xb0\x9d\x4c\x8c\xbe\x8b\xd8\x8a\ -\xf9\xd7\xec\x04\x24\x60\x3b\x12\xef\x7d\x58\xec\x15\xf0\xd9\xe5\ -\x95\xb7\x23\xe1\xc4\xd0\x0f\x81\xf6\x2b\x51\x21\x61\x7f\xb1\xf5\ -\xc9\xb0\x38\xb4\x34\xc2\xe3\x8a\x47\x56\x41\xf0\xdd\xe9\xa8\x4e\ -\x4b\x8f\x8f\x26\x84\x12\x01\xbe\xf2\x26\x3f\x81\x33\xff\x0c\xe4\ -\xe8\x3a\x99\x15\x6f\x86\x60\xdc\xc4\x13\x33\x4c\xa9\x85\xe6\x81\ -\x67\x14\xdc\xcf\x90\xa7\xab\x10\x93\xab\xc0\x73\x33\xb8\x73\x0d\ -\xaf\x03\x47\x48\xe4\x1c\x5f\x6c\xbb\xba\x42\x12\x13\x97\x20\xdf\ -\x49\xe5\x6d\xf0\x31\x31\x06\xdb\xa1\x02\x43\x05\x86\x0a\x74\x53\ -\x81\xbf\x22\x47\x9c\xdf\xb5\xb4\xa1\x9c\x00\x00\x00\x00\x49\x45\ -\x4e\x44\xae\x42\x60\x82\ -\x00\x00\x01\x33\ -\x3c\ -\x3f\x78\x6d\x6c\x20\x76\x65\x72\x73\x69\x6f\x6e\x3d\x22\x31\x2e\ -\x30\x22\x20\x65\x6e\x63\x6f\x64\x69\x6e\x67\x3d\x22\x55\x54\x46\ -\x2d\x38\x22\x3f\x3e\x3c\x73\x76\x67\x20\x77\x69\x64\x74\x68\x3d\ -\x22\x33\x32\x22\x20\x68\x65\x69\x67\x68\x74\x3d\x22\x33\x32\x22\ -\x20\x76\x69\x65\x77\x42\x6f\x78\x3d\x22\x30\x20\x30\x20\x34\x38\ -\x20\x34\x38\x22\x20\x66\x69\x6c\x6c\x3d\x22\x6e\x6f\x6e\x65\x22\ -\x20\x78\x6d\x6c\x6e\x73\x3d\x22\x68\x74\x74\x70\x3a\x2f\x2f\x77\ -\x77\x77\x2e\x77\x33\x2e\x6f\x72\x67\x2f\x32\x30\x30\x30\x2f\x73\ 
-\x76\x67\x22\x3e\x3c\x70\x61\x74\x68\x20\x64\x3d\x22\x4d\x34\x32\ -\x20\x36\x56\x34\x32\x4d\x31\x37\x20\x31\x39\x4c\x31\x32\x20\x32\ -\x34\x4d\x31\x32\x20\x32\x34\x4c\x31\x37\x20\x32\x39\x4d\x31\x32\ -\x20\x32\x34\x48\x33\x36\x4d\x33\x31\x20\x31\x39\x4c\x33\x36\x20\ -\x32\x34\x4d\x33\x36\x20\x32\x34\x4c\x33\x31\x20\x32\x39\x4d\x36\ -\x20\x36\x4c\x36\x20\x34\x32\x22\x20\x73\x74\x72\x6f\x6b\x65\x3d\ -\x22\x23\x30\x30\x36\x34\x66\x66\x22\x20\x73\x74\x72\x6f\x6b\x65\ -\x2d\x77\x69\x64\x74\x68\x3d\x22\x33\x22\x20\x73\x74\x72\x6f\x6b\ -\x65\x2d\x6c\x69\x6e\x65\x63\x61\x70\x3d\x22\x73\x71\x75\x61\x72\ -\x65\x22\x20\x73\x74\x72\x6f\x6b\x65\x2d\x6c\x69\x6e\x65\x6a\x6f\ -\x69\x6e\x3d\x22\x6d\x69\x74\x65\x72\x22\x2f\x3e\x3c\x2f\x73\x76\ -\x67\x3e\ -\x00\x00\x01\x7a\ -\x3c\ -\x3f\x78\x6d\x6c\x20\x76\x65\x72\x73\x69\x6f\x6e\x3d\x22\x31\x2e\ -\x30\x22\x20\x65\x6e\x63\x6f\x64\x69\x6e\x67\x3d\x22\x55\x54\x46\ -\x2d\x38\x22\x3f\x3e\x3c\x73\x76\x67\x20\x77\x69\x64\x74\x68\x3d\ -\x22\x33\x32\x22\x20\x68\x65\x69\x67\x68\x74\x3d\x22\x33\x32\x22\ -\x20\x76\x69\x65\x77\x42\x6f\x78\x3d\x22\x30\x20\x30\x20\x34\x38\ -\x20\x34\x38\x22\x20\x66\x69\x6c\x6c\x3d\x22\x6e\x6f\x6e\x65\x22\ -\x20\x78\x6d\x6c\x6e\x73\x3d\x22\x68\x74\x74\x70\x3a\x2f\x2f\x77\ -\x77\x77\x2e\x77\x33\x2e\x6f\x72\x67\x2f\x32\x30\x30\x30\x2f\x73\ -\x76\x67\x22\x3e\x3c\x70\x61\x74\x68\x20\x64\x3d\x22\x4d\x37\x20\ -\x34\x32\x48\x34\x33\x22\x20\x73\x74\x72\x6f\x6b\x65\x3d\x22\x23\ -\x30\x30\x36\x34\x66\x66\x22\x20\x73\x74\x72\x6f\x6b\x65\x2d\x77\ -\x69\x64\x74\x68\x3d\x22\x33\x22\x20\x73\x74\x72\x6f\x6b\x65\x2d\ -\x6c\x69\x6e\x65\x63\x61\x70\x3d\x22\x73\x71\x75\x61\x72\x65\x22\ -\x20\x73\x74\x72\x6f\x6b\x65\x2d\x6c\x69\x6e\x65\x6a\x6f\x69\x6e\ -\x3d\x22\x6d\x69\x74\x65\x72\x22\x2f\x3e\x3c\x70\x61\x74\x68\x20\ -\x64\x3d\x22\x4d\x31\x31\x20\x32\x36\x2e\x37\x31\x39\x39\x56\x33\ -\x34\x48\x31\x38\x2e\x33\x31\x37\x32\x4c\x33\x39\x20\x31\x33\x2e\ -\x33\x30\x38\x31\x4c\x33\x31\x2e\x36\x39\x35\x31\x20\x36\x4c\x31\ -\x31\x20\x32\x36\x2e\x37\x31\x39\x39\x5a\x22\x20\x66\x69\x6c\x6c\ -\x3d\x22\x6e\x6f\x6e\x65\x22\x20\x73\x74\x72\x6f\x6b\x65\x3d\x22\ -\x23\x30\x30\x36\x34\x66\x66\x22\x20\x73\x74\x72\x6f\x6b\x65\x2d\ -\x77\x69\x64\x74\x68\x3d\x22\x33\x22\x20\x73\x74\x72\x6f\x6b\x65\ -\x2d\x6c\x69\x6e\x65\x6a\x6f\x69\x6e\x3d\x22\x6d\x69\x74\x65\x72\ -\x22\x2f\x3e\x3c\x2f\x73\x76\x67\x3e\ -\x00\x00\x03\x09\ -\x3c\ -\x3f\x78\x6d\x6c\x20\x76\x65\x72\x73\x69\x6f\x6e\x3d\x22\x31\x2e\ -\x30\x22\x20\x65\x6e\x63\x6f\x64\x69\x6e\x67\x3d\x22\x55\x54\x46\ -\x2d\x38\x22\x3f\x3e\x3c\x73\x76\x67\x20\x77\x69\x64\x74\x68\x3d\ -\x22\x33\x32\x22\x20\x68\x65\x69\x67\x68\x74\x3d\x22\x33\x32\x22\ -\x20\x76\x69\x65\x77\x42\x6f\x78\x3d\x22\x30\x20\x30\x20\x34\x38\ -\x20\x34\x38\x22\x20\x66\x69\x6c\x6c\x3d\x22\x6e\x6f\x6e\x65\x22\ -\x20\x78\x6d\x6c\x6e\x73\x3d\x22\x68\x74\x74\x70\x3a\x2f\x2f\x77\ -\x77\x77\x2e\x77\x33\x2e\x6f\x72\x67\x2f\x32\x30\x30\x30\x2f\x73\ -\x76\x67\x22\x3e\x3c\x70\x61\x74\x68\x20\x64\x3d\x22\x4d\x35\x20\ -\x31\x30\x4c\x38\x20\x31\x33\x4c\x31\x34\x20\x37\x22\x20\x73\x74\ -\x72\x6f\x6b\x65\x3d\x22\x23\x30\x30\x36\x34\x66\x66\x22\x20\x73\ -\x74\x72\x6f\x6b\x65\x2d\x77\x69\x64\x74\x68\x3d\x22\x33\x22\x20\ -\x73\x74\x72\x6f\x6b\x65\x2d\x6c\x69\x6e\x65\x63\x61\x70\x3d\x22\ -\x73\x71\x75\x61\x72\x65\x22\x20\x73\x74\x72\x6f\x6b\x65\x2d\x6c\ -\x69\x6e\x65\x6a\x6f\x69\x6e\x3d\x22\x6d\x69\x74\x65\x72\x22\x2f\ -\x3e\x3c\x70\x61\x74\x68\x20\x64\x3d\x22\x4d\x35\x20\x32\x34\x4c\ -\x38\x20\x32\x37\x4c\x31\x34\x20\x32\x31\x22\x20\x73\x74\x72\x6f\ 
-\x6b\x65\x3d\x22\x23\x30\x30\x36\x34\x66\x66\x22\x20\x73\x74\x72\ -\x6f\x6b\x65\x2d\x77\x69\x64\x74\x68\x3d\x22\x33\x22\x20\x73\x74\ -\x72\x6f\x6b\x65\x2d\x6c\x69\x6e\x65\x63\x61\x70\x3d\x22\x73\x71\ -\x75\x61\x72\x65\x22\x20\x73\x74\x72\x6f\x6b\x65\x2d\x6c\x69\x6e\ -\x65\x6a\x6f\x69\x6e\x3d\x22\x6d\x69\x74\x65\x72\x22\x2f\x3e\x3c\ -\x70\x61\x74\x68\x20\x64\x3d\x22\x4d\x35\x20\x33\x38\x4c\x38\x20\ -\x34\x31\x4c\x31\x34\x20\x33\x35\x22\x20\x73\x74\x72\x6f\x6b\x65\ -\x3d\x22\x23\x30\x30\x36\x34\x66\x66\x22\x20\x73\x74\x72\x6f\x6b\ -\x65\x2d\x77\x69\x64\x74\x68\x3d\x22\x33\x22\x20\x73\x74\x72\x6f\ -\x6b\x65\x2d\x6c\x69\x6e\x65\x63\x61\x70\x3d\x22\x73\x71\x75\x61\ -\x72\x65\x22\x20\x73\x74\x72\x6f\x6b\x65\x2d\x6c\x69\x6e\x65\x6a\ -\x6f\x69\x6e\x3d\x22\x6d\x69\x74\x65\x72\x22\x2f\x3e\x3c\x70\x61\ -\x74\x68\x20\x64\x3d\x22\x4d\x32\x31\x20\x32\x34\x48\x34\x33\x22\ -\x20\x73\x74\x72\x6f\x6b\x65\x3d\x22\x23\x30\x30\x36\x34\x66\x66\ -\x22\x20\x73\x74\x72\x6f\x6b\x65\x2d\x77\x69\x64\x74\x68\x3d\x22\ -\x33\x22\x20\x73\x74\x72\x6f\x6b\x65\x2d\x6c\x69\x6e\x65\x63\x61\ -\x70\x3d\x22\x73\x71\x75\x61\x72\x65\x22\x20\x73\x74\x72\x6f\x6b\ -\x65\x2d\x6c\x69\x6e\x65\x6a\x6f\x69\x6e\x3d\x22\x6d\x69\x74\x65\ -\x72\x22\x2f\x3e\x3c\x70\x61\x74\x68\x20\x64\x3d\x22\x4d\x32\x31\ -\x20\x33\x38\x48\x34\x33\x22\x20\x73\x74\x72\x6f\x6b\x65\x3d\x22\ -\x23\x30\x30\x36\x34\x66\x66\x22\x20\x73\x74\x72\x6f\x6b\x65\x2d\ -\x77\x69\x64\x74\x68\x3d\x22\x33\x22\x20\x73\x74\x72\x6f\x6b\x65\ -\x2d\x6c\x69\x6e\x65\x63\x61\x70\x3d\x22\x73\x71\x75\x61\x72\x65\ -\x22\x20\x73\x74\x72\x6f\x6b\x65\x2d\x6c\x69\x6e\x65\x6a\x6f\x69\ -\x6e\x3d\x22\x6d\x69\x74\x65\x72\x22\x2f\x3e\x3c\x70\x61\x74\x68\ -\x20\x64\x3d\x22\x4d\x32\x31\x20\x31\x30\x48\x34\x33\x22\x20\x73\ -\x74\x72\x6f\x6b\x65\x3d\x22\x23\x30\x30\x36\x34\x66\x66\x22\x20\ -\x73\x74\x72\x6f\x6b\x65\x2d\x77\x69\x64\x74\x68\x3d\x22\x33\x22\ -\x20\x73\x74\x72\x6f\x6b\x65\x2d\x6c\x69\x6e\x65\x63\x61\x70\x3d\ -\x22\x73\x71\x75\x61\x72\x65\x22\x20\x73\x74\x72\x6f\x6b\x65\x2d\ -\x6c\x69\x6e\x65\x6a\x6f\x69\x6e\x3d\x22\x6d\x69\x74\x65\x72\x22\ -\x2f\x3e\x3c\x2f\x73\x76\x67\x3e\ -\x00\x00\x02\x8d\ -\x3c\ -\x3f\x78\x6d\x6c\x20\x76\x65\x72\x73\x69\x6f\x6e\x3d\x22\x31\x2e\ -\x30\x22\x20\x65\x6e\x63\x6f\x64\x69\x6e\x67\x3d\x22\x55\x54\x46\ -\x2d\x38\x22\x3f\x3e\x3c\x73\x76\x67\x20\x77\x69\x64\x74\x68\x3d\ -\x22\x33\x32\x22\x20\x68\x65\x69\x67\x68\x74\x3d\x22\x33\x32\x22\ -\x20\x76\x69\x65\x77\x42\x6f\x78\x3d\x22\x30\x20\x30\x20\x34\x38\ -\x20\x34\x38\x22\x20\x66\x69\x6c\x6c\x3d\x22\x6e\x6f\x6e\x65\x22\ -\x20\x78\x6d\x6c\x6e\x73\x3d\x22\x68\x74\x74\x70\x3a\x2f\x2f\x77\ -\x77\x77\x2e\x77\x33\x2e\x6f\x72\x67\x2f\x32\x30\x30\x30\x2f\x73\ -\x76\x67\x22\x3e\x3c\x70\x61\x74\x68\x20\x64\x3d\x22\x4d\x31\x34\ -\x2e\x35\x20\x38\x43\x31\x33\x2e\x38\x34\x30\x36\x20\x38\x2e\x33\ -\x37\x36\x35\x32\x20\x31\x33\x2e\x32\x30\x36\x32\x20\x38\x2e\x37\ -\x39\x31\x30\x33\x20\x31\x32\x2e\x36\x20\x39\x2e\x32\x34\x30\x35\ -\x31\x43\x31\x31\x2e\x35\x36\x32\x35\x20\x31\x30\x2e\x30\x30\x39\ -\x37\x20\x31\x30\x2e\x36\x30\x37\x34\x20\x31\x30\x2e\x38\x38\x31\ -\x34\x20\x39\x2e\x37\x35\x20\x31\x31\x2e\x38\x34\x30\x32\x43\x36\ -\x2e\x37\x39\x33\x37\x37\x20\x31\x35\x2e\x31\x34\x36\x33\x20\x35\ -\x20\x31\x39\x2e\x34\x38\x39\x31\x20\x35\x20\x32\x34\x2e\x32\x34\ -\x35\x35\x43\x35\x20\x33\x34\x2e\x36\x30\x33\x33\x20\x31\x33\x2e\ -\x35\x30\x36\x36\x20\x34\x33\x20\x32\x34\x20\x34\x33\x43\x33\x34\ -\x2e\x34\x39\x33\x34\x20\x34\x33\x20\x34\x33\x20\x33\x34\x2e\x36\ -\x30\x33\x33\x20\x34\x33\x20\x32\x34\x2e\x32\x34\x35\x35\x43\x34\ 
-\x33\x20\x31\x39\x2e\x34\x38\x39\x31\x20\x34\x31\x2e\x32\x30\x36\ -\x32\x20\x31\x35\x2e\x31\x34\x36\x33\x20\x33\x38\x2e\x32\x35\x20\ -\x31\x31\x2e\x38\x34\x30\x32\x43\x33\x37\x2e\x33\x39\x32\x36\x20\ -\x31\x30\x2e\x38\x38\x31\x34\x20\x33\x36\x2e\x34\x33\x37\x35\x20\ -\x31\x30\x2e\x30\x30\x39\x37\x20\x33\x35\x2e\x34\x20\x39\x2e\x32\ -\x34\x30\x35\x31\x43\x33\x34\x2e\x37\x39\x33\x38\x20\x38\x2e\x37\ -\x39\x31\x30\x33\x20\x33\x34\x2e\x31\x35\x39\x34\x20\x38\x2e\x33\ -\x37\x36\x35\x32\x20\x33\x33\x2e\x35\x20\x38\x22\x20\x73\x74\x72\ -\x6f\x6b\x65\x3d\x22\x23\x30\x30\x36\x34\x66\x66\x22\x20\x73\x74\ -\x72\x6f\x6b\x65\x2d\x77\x69\x64\x74\x68\x3d\x22\x33\x22\x20\x73\ -\x74\x72\x6f\x6b\x65\x2d\x6c\x69\x6e\x65\x63\x61\x70\x3d\x22\x73\ -\x71\x75\x61\x72\x65\x22\x20\x73\x74\x72\x6f\x6b\x65\x2d\x6c\x69\ -\x6e\x65\x6a\x6f\x69\x6e\x3d\x22\x6d\x69\x74\x65\x72\x22\x2f\x3e\ -\x3c\x70\x61\x74\x68\x20\x64\x3d\x22\x4d\x32\x34\x20\x34\x56\x32\ -\x34\x22\x20\x73\x74\x72\x6f\x6b\x65\x3d\x22\x23\x30\x30\x36\x34\ -\x66\x66\x22\x20\x73\x74\x72\x6f\x6b\x65\x2d\x77\x69\x64\x74\x68\ -\x3d\x22\x33\x22\x20\x73\x74\x72\x6f\x6b\x65\x2d\x6c\x69\x6e\x65\ -\x63\x61\x70\x3d\x22\x73\x71\x75\x61\x72\x65\x22\x20\x73\x74\x72\ -\x6f\x6b\x65\x2d\x6c\x69\x6e\x65\x6a\x6f\x69\x6e\x3d\x22\x6d\x69\ -\x74\x65\x72\x22\x2f\x3e\x3c\x2f\x73\x76\x67\x3e\ -\x00\x00\x01\xd7\ -\x3c\ -\x3f\x78\x6d\x6c\x20\x76\x65\x72\x73\x69\x6f\x6e\x3d\x22\x31\x2e\ -\x30\x22\x20\x65\x6e\x63\x6f\x64\x69\x6e\x67\x3d\x22\x55\x54\x46\ -\x2d\x38\x22\x3f\x3e\x3c\x73\x76\x67\x20\x77\x69\x64\x74\x68\x3d\ -\x22\x33\x32\x22\x20\x68\x65\x69\x67\x68\x74\x3d\x22\x33\x32\x22\ -\x20\x76\x69\x65\x77\x42\x6f\x78\x3d\x22\x30\x20\x30\x20\x34\x38\ -\x20\x34\x38\x22\x20\x66\x69\x6c\x6c\x3d\x22\x6e\x6f\x6e\x65\x22\ -\x20\x78\x6d\x6c\x6e\x73\x3d\x22\x68\x74\x74\x70\x3a\x2f\x2f\x77\ -\x77\x77\x2e\x77\x33\x2e\x6f\x72\x67\x2f\x32\x30\x30\x30\x2f\x73\ -\x76\x67\x22\x3e\x3c\x70\x61\x74\x68\x20\x64\x3d\x22\x4d\x32\x34\ -\x2e\x30\x30\x38\x33\x20\x31\x34\x2e\x31\x30\x30\x36\x56\x34\x32\ -\x2e\x30\x30\x30\x31\x22\x20\x73\x74\x72\x6f\x6b\x65\x3d\x22\x23\ -\x30\x30\x36\x34\x66\x66\x22\x20\x73\x74\x72\x6f\x6b\x65\x2d\x77\ -\x69\x64\x74\x68\x3d\x22\x33\x22\x20\x73\x74\x72\x6f\x6b\x65\x2d\ -\x6c\x69\x6e\x65\x63\x61\x70\x3d\x22\x73\x71\x75\x61\x72\x65\x22\ -\x20\x73\x74\x72\x6f\x6b\x65\x2d\x6c\x69\x6e\x65\x6a\x6f\x69\x6e\ -\x3d\x22\x6d\x69\x74\x65\x72\x22\x2f\x3e\x3c\x70\x61\x74\x68\x20\ -\x64\x3d\x22\x4d\x31\x32\x20\x32\x36\x4c\x32\x34\x20\x31\x34\x4c\ -\x33\x36\x20\x32\x36\x22\x20\x73\x74\x72\x6f\x6b\x65\x3d\x22\x23\ -\x30\x30\x36\x34\x66\x66\x22\x20\x73\x74\x72\x6f\x6b\x65\x2d\x77\ -\x69\x64\x74\x68\x3d\x22\x33\x22\x20\x73\x74\x72\x6f\x6b\x65\x2d\ -\x6c\x69\x6e\x65\x63\x61\x70\x3d\x22\x73\x71\x75\x61\x72\x65\x22\ -\x20\x73\x74\x72\x6f\x6b\x65\x2d\x6c\x69\x6e\x65\x6a\x6f\x69\x6e\ -\x3d\x22\x6d\x69\x74\x65\x72\x22\x2f\x3e\x3c\x70\x61\x74\x68\x20\ -\x64\x3d\x22\x4d\x31\x32\x20\x36\x48\x33\x36\x22\x20\x73\x74\x72\ -\x6f\x6b\x65\x3d\x22\x23\x30\x30\x36\x34\x66\x66\x22\x20\x73\x74\ -\x72\x6f\x6b\x65\x2d\x77\x69\x64\x74\x68\x3d\x22\x33\x22\x20\x73\ -\x74\x72\x6f\x6b\x65\x2d\x6c\x69\x6e\x65\x63\x61\x70\x3d\x22\x73\ -\x71\x75\x61\x72\x65\x22\x20\x73\x74\x72\x6f\x6b\x65\x2d\x6c\x69\ -\x6e\x65\x6a\x6f\x69\x6e\x3d\x22\x6d\x69\x74\x65\x72\x22\x2f\x3e\ -\x3c\x2f\x73\x76\x67\x3e\ -\x00\x00\x04\x7e\ -\x00\ -\x00\x01\x00\x01\x00\x10\x10\x00\x00\x01\x00\x20\x00\x68\x04\x00\ -\x00\x16\x00\x00\x00\x28\x00\x00\x00\x10\x00\x00\x00\x20\x00\x00\ -\x00\x01\x00\x20\x00\x00\x00\x00\x00\x00\x04\x00\x00\x12\x0b\x00\ 
-\x00\x12\x0b\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x64\x61\x5e\ -\xff\x60\x5a\x5f\xff\x64\x61\x5e\xff\x5f\x5f\x5e\xff\x58\x59\x5f\ -\xff\x5a\x5b\x5f\xff\x5b\x5b\x5f\xff\x5b\x5b\x5f\xff\x5b\x5b\x5f\ -\xff\x5b\x5b\x5f\xff\x5b\x5b\x5f\xff\x5b\x5b\x5f\xff\x5b\x5b\x5f\ -\xff\x5b\x5b\x5f\xff\x62\x60\x5e\xff\x64\x61\x5e\xff\x1b\x17\x13\ -\xff\x29\x33\x0d\xff\x19\x16\x12\xff\x32\x20\x11\xff\x50\x3c\x0d\ -\xff\x45\x33\x0e\xff\x42\x31\x0e\xff\x44\x32\x0e\xff\x44\x32\x0e\ -\xff\x44\x32\x0e\xff\x44\x32\x0e\xff\x44\x32\x0e\xff\x44\x33\x0e\ -\xff\x41\x31\x0e\xff\x20\x1a\x12\xff\x1a\x16\x13\xff\x25\x20\x1f\ -\xff\x66\xa0\x08\xff\x65\xa2\x08\xff\x4e\x57\x15\xff\x78\x54\x17\ -\xff\xb7\x83\x11\xff\xc2\x8d\x0f\xff\xb9\x86\x11\xff\xb9\x86\x11\ -\xff\xb8\x86\x11\xff\xba\x87\x10\xff\xba\x87\x10\xff\xb8\x86\x11\ -\xff\xc3\x8d\x0f\xff\x66\x4e\x18\xff\x1d\x1c\x20\xff\x24\x1e\x1d\ -\xff\x60\x97\x07\xff\x77\xc5\x00\xff\x6e\xb9\x01\xff\x44\x70\x0d\ -\xff\x3a\x32\x19\xff\x88\x64\x13\xff\xb4\x84\x11\xff\xb6\x85\x11\ -\xff\xb8\x85\x0f\xff\xb0\x7f\x0f\xff\xaf\x7f\x0f\xff\xad\x7e\x0f\ -\xff\xbb\x86\x0e\xff\x6f\x53\x15\xff\x1a\x1a\x1e\xff\x24\x1e\x1d\ -\xff\x60\x98\x07\xff\x73\xbc\x01\xff\x6f\xb3\x03\xff\x7c\xcc\x00\ -\xff\x50\x81\x0d\xff\x1e\x15\x1a\xff\x46\x32\x0c\xff\x52\x39\x0b\ -\xff\x76\x57\x12\xff\xb0\x80\x10\xff\xb3\x82\x0e\xff\xae\x7e\x0f\ -\xff\xbc\x87\x0e\xff\x6d\x52\x15\xff\x1a\x1a\x1e\xff\x24\x1e\x1d\ -\xff\x60\x98\x07\xff\x73\xbd\x01\xff\x71\xb9\x01\xff\x65\xa4\x06\ -\xff\x32\x35\x10\xff\x21\x24\x2d\xff\x11\x3a\x6a\xff\x11\x36\x62\ -\xff\x17\x1a\x23\xff\x49\x37\x13\xff\xad\x7e\x10\xff\xb0\x7f\x0f\ -\xff\xbb\x87\x0e\xff\x6e\x52\x15\xff\x1a\x1a\x1e\xff\x24\x1e\x1d\ -\xff\x60\x98\x07\xff\x73\xbd\x01\xff\x71\xbc\x03\xff\x3a\x48\x0d\ -\xff\x17\x20\x40\xff\x06\x68\xd2\xff\x02\x72\xee\xff\x02\x73\xef\ -\xff\x0d\x60\xbc\xff\x14\x1c\x2a\xff\x67\x4c\x12\xff\xb7\x85\x0f\ -\xff\xba\x86\x0e\xff\x6e\x52\x15\xff\x1a\x1a\x1e\xff\x24\x1e\x1d\ -\xff\x60\x97\x07\xff\x76\xc2\x00\xff\x65\xa3\x07\xff\x28\x20\x11\ -\xff\x0c\x54\xa5\xff\x00\x72\xf1\xff\x04\x6a\xdb\xff\x04\x6a\xdb\ -\xff\x01\x74\xf4\xff\x0f\x44\x80\xff\x3a\x28\x0d\xff\xac\x7e\x12\ -\xff\xbc\x88\x0e\xff\x6d\x52\x15\xff\x1a\x1a\x1e\xff\x24\x1e\x1d\ -\xff\x60\x97\x07\xff\x77\xc4\x00\xff\x61\x9a\x07\xff\x24\x1e\x18\ -\xff\x09\x5e\xbc\xff\x02\x6e\xe6\xff\x03\x6b\xde\xff\x04\x6a\xdc\ -\xff\x02\x71\xeb\xff\x0c\x4d\x99\xff\x35\x25\x0e\xff\xa9\x7c\x12\ -\xff\xbd\x88\x0e\xff\x6d\x52\x15\xff\x1a\x1a\x1e\xff\x24\x1e\x1d\ -\xff\x60\x98\x07\xff\x75\xc0\x01\xff\x6a\xae\x06\xff\x2c\x27\x0e\ -\xff\x11\x45\x83\xff\x00\x76\xf7\xff\x03\x6d\xe2\xff\x02\x6e\xe5\ -\xff\x00\x73\xf2\xff\x19\x3b\x5d\xff\x28\x1e\x11\xff\x94\x6d\x14\ -\xff\xc1\x8b\x0d\xff\x6c\x51\x15\xff\x1a\x1a\x1e\xff\x24\x1e\x1d\ -\xff\x60\x98\x07\xff\x73\xbc\x01\xff\x74\xbf\x01\xff\x4b\x6d\x0d\ -\xff\x1c\x12\x21\xff\x10\x4d\x91\xff\x06\x69\xd3\xff\x07\x67\xcf\ -\xff\x13\x40\x77\xff\x23\x16\x18\xff\x1b\x19\x1f\xff\x4d\x3d\x18\ -\xff\xc2\x8b\x0d\xff\x6c\x51\x15\xff\x1a\x1a\x1e\xff\x24\x1e\x1d\ -\xff\x60\x98\x07\xff\x73\xbe\x01\xff\x6f\xb6\x02\xff\x71\xba\x02\ -\xff\x48\x66\x0d\xff\x27\x1e\x12\xff\x1e\x1d\x29\xff\x1f\x1b\x25\ -\xff\x2a\x25\x10\xff\x50\x78\x0c\xff\x35\x40\x17\xff\x1b\x16\x1e\ -\xff\x9c\x72\x10\xff\x74\x56\x15\xff\x19\x19\x1e\xff\x24\x1e\x1d\ -\xff\x60\x97\x07\xff\x73\xbd\x01\xff\x6f\xb5\x02\xff\x70\xb6\x02\ -\xff\x73\xbf\x01\xff\x65\xa3\x07\xff\x57\x83\x08\xff\x58\x86\x08\ -\xff\x68\xa9\x07\xff\x79\xca\x00\xff\x53\x7c\x0c\xff\x15\x0f\x1f\ -\xff\x5f\x49\x16\xff\x76\x58\x14\xff\x18\x18\x1e\xff\x24\x1f\x1d\ 
-\xff\x63\x9e\x06\xff\x75\xc1\x00\xff\x71\xb9\x02\xff\x72\xba\x01\ -\xff\x71\xb9\x02\xff\x74\xbf\x01\xff\x76\xc4\x01\xff\x76\xc4\x01\ -\xff\x73\xbc\x01\xff\x72\xbc\x01\xff\x6a\xab\x04\xff\x25\x26\x1b\ -\xff\x31\x28\x1b\xff\x59\x45\x17\xff\x1f\x1d\x1e\xff\x22\x1a\x1e\ -\xff\x4f\x75\x0d\xff\x6d\xb2\x03\xff\x69\xa9\x04\xff\x69\xaa\x04\ -\xff\x69\xaa\x04\xff\x69\xa9\x04\xff\x68\xa8\x04\xff\x68\xa8\x04\ -\xff\x69\xa9\x04\xff\x69\xaa\x04\xff\x6b\xae\x03\xff\x34\x40\x16\ -\xff\x21\x1a\x1e\xff\x27\x23\x1c\xff\x25\x21\x1d\xff\x24\x20\x1d\ -\xff\x24\x20\x1d\xff\x2a\x2c\x1b\xff\x2a\x2c\x1b\xff\x2a\x2c\x1b\ -\xff\x2a\x2c\x1b\xff\x2a\x2c\x1b\xff\x2a\x2c\x1b\xff\x2a\x2c\x1b\ -\xff\x2a\x2c\x1b\xff\x2a\x2c\x1b\xff\x2b\x2c\x1b\xff\x27\x24\x1c\ -\xff\x25\x21\x1d\xff\x25\x21\x1d\xff\x25\x21\x1d\xff\x00\x00\x00\ -\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\ -\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\ -\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\ -\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\ -\x00\x00\x03\x2f\ -\x89\ -\x50\x4e\x47\x0d\x0a\x1a\x0a\x00\x00\x00\x0d\x49\x48\x44\x52\x00\ -\x00\x00\x30\x00\x00\x00\x30\x08\x06\x00\x00\x00\x57\x02\xf9\x87\ -\x00\x00\x00\x06\x62\x4b\x47\x44\x00\xff\x00\xff\x00\xff\xa0\xbd\ -\xa7\x93\x00\x00\x00\x09\x70\x48\x59\x73\x00\x00\x0d\xd7\x00\x00\ -\x0d\xd7\x01\x42\x28\x9b\x78\x00\x00\x00\x07\x74\x49\x4d\x45\x07\ -\xe6\x0a\x19\x03\x13\x1b\x82\xa4\x64\xac\x00\x00\x02\xbc\x49\x44\ -\x41\x54\x68\x43\xed\x58\xbf\x6b\x14\x41\x18\x7d\xab\x06\x63\xa5\ -\x58\x89\x85\x4d\xd0\xc6\x1f\xa4\x10\x0b\x0b\x09\x28\x88\xfe\x01\ -\x56\x56\x22\x0a\x96\x16\x41\x93\x14\x07\x1a\x91\x14\xe9\x52\x44\ -\x0e\xed\x2d\x6d\x04\xb1\x31\x85\xad\x85\x04\x22\xa2\x45\x14\xc4\ -\x2a\x56\xe1\x88\x3f\xc6\x37\x7b\x3b\xcb\xec\x64\x67\x77\xe6\x76\ -\x99\xdd\x23\xfb\x85\x25\x37\xf3\x7d\x33\xdf\x7b\x6f\xbe\x99\x9d\ -\x3b\xa0\xb3\x4e\x81\x4e\x81\x4e\x81\x26\x15\x88\xf2\x92\x3f\x02\ -\xa6\xf6\x03\x57\x04\x70\x24\xcf\xdf\x40\xdf\xd6\x3e\xe0\xed\x43\ -\xe0\xab\x99\x7b\x17\x81\x45\xe0\x16\x83\x56\xf8\x4c\x9a\xc1\x0d\ -\xb7\x07\x04\x7b\x6f\x0e\x78\xa1\xe3\xc8\x10\x90\xca\x93\xe9\x3a\ -\x03\x0e\x36\x0c\xd6\x96\x7e\x40\x7c\xa7\xf5\x95\x38\xa0\x47\xd2\ -\x79\x59\x81\x67\xf9\x7c\x20\xbb\x37\xb6\x99\x02\xf7\x5f\x65\xbe\ -\x69\x3e\x93\xff\x58\xda\xfc\xff\x4c\xe5\xcf\x10\x60\xe7\x71\xe5\ -\x20\xf8\x57\xf3\x40\x2f\x30\xd0\xdc\x74\x2c\xeb\x01\x1d\x92\x80\ -\xb4\x14\xa3\x6c\x50\xf4\xf1\xb6\xb1\x27\x60\x96\x90\xd3\x72\x88\ -\xf7\xe0\x16\x19\x5a\x74\x11\xbb\x4e\x32\xa7\x49\x6a\x0a\xf2\x5e\ -\x81\x18\xfc\x26\xb3\x27\x8f\x4e\xa6\x26\x4c\x5e\xd3\x78\x11\x48\ -\xc1\xeb\x29\x48\xa4\x49\x12\x5e\x04\xbc\xa4\x09\x14\xbc\xb7\x08\ -\xc4\x1b\xf6\x84\x21\x2d\xdb\x4d\x6e\x64\xef\x15\x48\x49\x48\x22\ -\x0e\xe0\x79\x5c\x45\xdc\xf6\xf1\xdf\x4b\x80\x77\xc4\x7a\xcd\x9b\ -\x80\x4c\x2f\x49\xa8\xa7\x0c\x4e\x24\xc0\xb7\xff\xd0\x6e\x08\xfc\ -\x59\x02\x8e\x95\x8d\xf1\xf1\x8f\x44\xc0\x39\x81\xd4\xdd\xb0\x59\ -\x81\x1f\xf1\x7a\xd4\x64\x23\x11\xe8\xc9\x2b\x48\x52\x16\xf1\xe7\ -\x3c\x2b\x03\x49\x7f\x5c\x5e\x15\x2d\x3f\x79\xc1\xa4\x12\x70\x4f\ -\xe0\xaf\x0a\x91\x9f\xd9\x97\xfd\xe2\x53\x06\x3e\x19\x2c\xcb\xeb\ -\x29\x70\xb8\x20\x5d\xa9\xcb\x8b\x80\x54\x4c\x07\xaf\x91\xd8\xe2\ -\x8d\xf1\x4c\xdc\x76\x04\xaf\xc6\x3e\x10\xf8\xe5\x3b\x46\x67\xe5\ -\x4c\xa0\x47\xe5\xf5\x0d\x69\x4a\x33\x2f\xf0\xb1\x0a\x90\x51\xc7\ -\x3a\x11\x90\xe0\xf3\x94\x37\x49\x54\x6e\x73\xf5\x96\x81\x43\x3e\ -\xf3\x94\x12\x08\x06\x3e\x41\x7d\x5f\x60\xfb\x09\x70\xca\x95\x44\ 
-\x21\x81\xd0\xe0\x15\xe8\x39\x81\x4f\xae\x25\x65\x25\x60\xdb\xb0\ -\xae\xca\xd4\x12\xc7\x92\x5a\x05\x26\x8a\xe6\xb2\x12\xe0\x01\x7d\ -\xbd\x68\x60\x28\xdf\x5d\x81\x1d\x82\xb4\xfe\x3e\x65\x25\x40\x80\ -\xe7\x43\x81\x2c\xcb\xc3\xbb\xc8\x1d\x5b\x4c\x86\x40\x1d\x6f\x46\ -\x5b\xa2\x8a\xfd\xe9\xc9\x64\x2a\x9e\x69\xb3\x6c\xce\x56\x4c\x14\ -\x62\xf8\x39\x3d\x89\x49\xe8\xa8\xe6\xdc\x08\x81\xc6\x31\xc7\x77\ -\x15\xc7\x72\xd2\x31\x5a\x2e\x62\xc3\xe8\x9f\x8e\x93\x87\x08\xfb\ -\x62\x4b\x62\xae\x80\x2d\xae\xb5\xfd\x63\x4f\xc0\xfc\x61\xeb\x9b\ -\x26\xf5\x85\xc5\x08\xaf\x5b\x22\xfd\x8c\xc2\xc1\x83\x66\x53\xc7\ -\x94\x21\xc0\x57\x5e\xff\x37\x70\x33\x09\x90\x47\xd7\xb5\x96\x10\ -\x48\x61\x70\x13\xf7\x75\x4c\x99\x12\x9a\x05\xde\xb1\xe3\x79\xdb\ -\x40\x6b\x78\xfa\x0b\xc0\x9a\x8e\x2f\xf7\x2b\xdd\x63\xe0\x12\x89\ -\xdc\xe6\x8b\xed\x64\x1b\xc8\x10\xe4\x67\xa9\xbc\x09\xbe\x0d\xd8\ -\x3a\x0c\x9d\x02\x9d\x02\x7b\x5d\x81\xff\xd4\x3b\x9a\x8c\xa1\x62\ -\xf3\x6f\x00\x00\x00\x00\x49\x45\x4e\x44\xae\x42\x60\x82\ -\x00\x00\x02\xa9\ -\x3c\ -\x3f\x78\x6d\x6c\x20\x76\x65\x72\x73\x69\x6f\x6e\x3d\x22\x31\x2e\ -\x30\x22\x20\x65\x6e\x63\x6f\x64\x69\x6e\x67\x3d\x22\x55\x54\x46\ -\x2d\x38\x22\x3f\x3e\x3c\x73\x76\x67\x20\x77\x69\x64\x74\x68\x3d\ -\x22\x33\x32\x22\x20\x68\x65\x69\x67\x68\x74\x3d\x22\x33\x32\x22\ -\x20\x76\x69\x65\x77\x42\x6f\x78\x3d\x22\x30\x20\x30\x20\x34\x38\ -\x20\x34\x38\x22\x20\x66\x69\x6c\x6c\x3d\x22\x6e\x6f\x6e\x65\x22\ -\x20\x78\x6d\x6c\x6e\x73\x3d\x22\x68\x74\x74\x70\x3a\x2f\x2f\x77\ -\x77\x77\x2e\x77\x33\x2e\x6f\x72\x67\x2f\x32\x30\x30\x30\x2f\x73\ -\x76\x67\x22\x3e\x3c\x70\x61\x74\x68\x20\x64\x3d\x22\x4d\x32\x31\ -\x20\x33\x38\x43\x33\x30\x2e\x33\x38\x38\x38\x20\x33\x38\x20\x33\ -\x38\x20\x33\x30\x2e\x33\x38\x38\x38\x20\x33\x38\x20\x32\x31\x43\ -\x33\x38\x20\x31\x31\x2e\x36\x31\x31\x32\x20\x33\x30\x2e\x33\x38\ -\x38\x38\x20\x34\x20\x32\x31\x20\x34\x43\x31\x31\x2e\x36\x31\x31\ -\x32\x20\x34\x20\x34\x20\x31\x31\x2e\x36\x31\x31\x32\x20\x34\x20\ -\x32\x31\x43\x34\x20\x33\x30\x2e\x33\x38\x38\x38\x20\x31\x31\x2e\ -\x36\x31\x31\x32\x20\x33\x38\x20\x32\x31\x20\x33\x38\x5a\x22\x20\ -\x66\x69\x6c\x6c\x3d\x22\x6e\x6f\x6e\x65\x22\x20\x73\x74\x72\x6f\ -\x6b\x65\x3d\x22\x23\x30\x30\x36\x34\x66\x66\x22\x20\x73\x74\x72\ -\x6f\x6b\x65\x2d\x77\x69\x64\x74\x68\x3d\x22\x33\x22\x20\x73\x74\ -\x72\x6f\x6b\x65\x2d\x6c\x69\x6e\x65\x6a\x6f\x69\x6e\x3d\x22\x6d\ -\x69\x74\x65\x72\x22\x2f\x3e\x3c\x70\x61\x74\x68\x20\x64\x3d\x22\ -\x4d\x32\x31\x20\x31\x35\x4c\x32\x31\x20\x32\x37\x22\x20\x73\x74\ -\x72\x6f\x6b\x65\x3d\x22\x23\x30\x30\x36\x34\x66\x66\x22\x20\x73\ -\x74\x72\x6f\x6b\x65\x2d\x77\x69\x64\x74\x68\x3d\x22\x33\x22\x20\ -\x73\x74\x72\x6f\x6b\x65\x2d\x6c\x69\x6e\x65\x63\x61\x70\x3d\x22\ -\x73\x71\x75\x61\x72\x65\x22\x20\x73\x74\x72\x6f\x6b\x65\x2d\x6c\ -\x69\x6e\x65\x6a\x6f\x69\x6e\x3d\x22\x6d\x69\x74\x65\x72\x22\x2f\ -\x3e\x3c\x70\x61\x74\x68\x20\x64\x3d\x22\x4d\x31\x35\x2e\x30\x31\ -\x35\x36\x20\x32\x31\x2e\x30\x31\x35\x36\x4c\x32\x37\x20\x32\x31\ -\x22\x20\x73\x74\x72\x6f\x6b\x65\x3d\x22\x23\x30\x30\x36\x34\x66\ -\x66\x22\x20\x73\x74\x72\x6f\x6b\x65\x2d\x77\x69\x64\x74\x68\x3d\ -\x22\x33\x22\x20\x73\x74\x72\x6f\x6b\x65\x2d\x6c\x69\x6e\x65\x63\ -\x61\x70\x3d\x22\x73\x71\x75\x61\x72\x65\x22\x20\x73\x74\x72\x6f\ -\x6b\x65\x2d\x6c\x69\x6e\x65\x6a\x6f\x69\x6e\x3d\x22\x6d\x69\x74\ -\x65\x72\x22\x2f\x3e\x3c\x70\x61\x74\x68\x20\x64\x3d\x22\x4d\x33\ -\x33\x2e\x32\x32\x31\x36\x20\x33\x33\x2e\x32\x32\x31\x37\x4c\x34\ -\x31\x2e\x37\x30\x36\x39\x20\x34\x31\x2e\x37\x30\x37\x22\x20\x73\ 
-\x74\x72\x6f\x6b\x65\x3d\x22\x23\x30\x30\x36\x34\x66\x66\x22\x20\ -\x73\x74\x72\x6f\x6b\x65\x2d\x77\x69\x64\x74\x68\x3d\x22\x33\x22\ -\x20\x73\x74\x72\x6f\x6b\x65\x2d\x6c\x69\x6e\x65\x63\x61\x70\x3d\ -\x22\x73\x71\x75\x61\x72\x65\x22\x20\x73\x74\x72\x6f\x6b\x65\x2d\ -\x6c\x69\x6e\x65\x6a\x6f\x69\x6e\x3d\x22\x6d\x69\x74\x65\x72\x22\ -\x2f\x3e\x3c\x2f\x73\x76\x67\x3e\ -\x00\x00\x02\xf5\ -\x3c\ -\x3f\x78\x6d\x6c\x20\x76\x65\x72\x73\x69\x6f\x6e\x3d\x22\x31\x2e\ -\x30\x22\x20\x65\x6e\x63\x6f\x64\x69\x6e\x67\x3d\x22\x55\x54\x46\ -\x2d\x38\x22\x3f\x3e\x3c\x73\x76\x67\x20\x77\x69\x64\x74\x68\x3d\ -\x22\x33\x32\x22\x20\x68\x65\x69\x67\x68\x74\x3d\x22\x33\x32\x22\ -\x20\x76\x69\x65\x77\x42\x6f\x78\x3d\x22\x30\x20\x30\x20\x34\x38\ -\x20\x34\x38\x22\x20\x66\x69\x6c\x6c\x3d\x22\x6e\x6f\x6e\x65\x22\ -\x20\x78\x6d\x6c\x6e\x73\x3d\x22\x68\x74\x74\x70\x3a\x2f\x2f\x77\ -\x77\x77\x2e\x77\x33\x2e\x6f\x72\x67\x2f\x32\x30\x30\x30\x2f\x73\ -\x76\x67\x22\x3e\x3c\x70\x61\x74\x68\x20\x64\x3d\x22\x4d\x34\x33\ -\x20\x32\x33\x56\x31\x34\x43\x34\x33\x20\x31\x32\x2e\x38\x39\x35\ -\x34\x20\x34\x32\x2e\x31\x30\x34\x36\x20\x31\x32\x20\x34\x31\x20\ -\x31\x32\x48\x32\x34\x4c\x31\x39\x20\x36\x48\x37\x43\x35\x2e\x38\ -\x39\x35\x34\x33\x20\x36\x20\x35\x20\x36\x2e\x38\x39\x35\x34\x33\ -\x20\x35\x20\x38\x56\x34\x30\x43\x35\x20\x34\x31\x2e\x31\x30\x34\ -\x36\x20\x35\x2e\x38\x39\x35\x34\x33\x20\x34\x32\x20\x37\x20\x34\ -\x32\x48\x32\x32\x22\x20\x73\x74\x72\x6f\x6b\x65\x3d\x22\x23\x30\ -\x30\x36\x34\x66\x66\x22\x20\x73\x74\x72\x6f\x6b\x65\x2d\x77\x69\ -\x64\x74\x68\x3d\x22\x33\x22\x20\x73\x74\x72\x6f\x6b\x65\x2d\x6c\ -\x69\x6e\x65\x63\x61\x70\x3d\x22\x73\x71\x75\x61\x72\x65\x22\x20\ -\x73\x74\x72\x6f\x6b\x65\x2d\x6c\x69\x6e\x65\x6a\x6f\x69\x6e\x3d\ -\x22\x6d\x69\x74\x65\x72\x22\x2f\x3e\x3c\x70\x61\x74\x68\x20\x64\ -\x3d\x22\x4d\x32\x37\x20\x33\x31\x4c\x34\x31\x20\x33\x31\x22\x20\ -\x73\x74\x72\x6f\x6b\x65\x3d\x22\x23\x30\x30\x36\x34\x66\x66\x22\ -\x20\x73\x74\x72\x6f\x6b\x65\x2d\x77\x69\x64\x74\x68\x3d\x22\x33\ -\x22\x20\x73\x74\x72\x6f\x6b\x65\x2d\x6c\x69\x6e\x65\x63\x61\x70\ -\x3d\x22\x73\x71\x75\x61\x72\x65\x22\x20\x73\x74\x72\x6f\x6b\x65\ -\x2d\x6c\x69\x6e\x65\x6a\x6f\x69\x6e\x3d\x22\x6d\x69\x74\x65\x72\ -\x22\x2f\x3e\x3c\x70\x61\x74\x68\x20\x64\x3d\x22\x4d\x32\x37\x20\ -\x33\x37\x48\x34\x31\x22\x20\x73\x74\x72\x6f\x6b\x65\x3d\x22\x23\ -\x30\x30\x36\x34\x66\x66\x22\x20\x73\x74\x72\x6f\x6b\x65\x2d\x77\ -\x69\x64\x74\x68\x3d\x22\x33\x22\x20\x73\x74\x72\x6f\x6b\x65\x2d\ -\x6c\x69\x6e\x65\x63\x61\x70\x3d\x22\x73\x71\x75\x61\x72\x65\x22\ -\x20\x73\x74\x72\x6f\x6b\x65\x2d\x6c\x69\x6e\x65\x6a\x6f\x69\x6e\ -\x3d\x22\x6d\x69\x74\x65\x72\x22\x2f\x3e\x3c\x70\x61\x74\x68\x20\ -\x64\x3d\x22\x4d\x34\x31\x20\x33\x31\x4c\x33\x36\x20\x32\x36\x22\ -\x20\x73\x74\x72\x6f\x6b\x65\x3d\x22\x23\x30\x30\x36\x34\x66\x66\ -\x22\x20\x73\x74\x72\x6f\x6b\x65\x2d\x77\x69\x64\x74\x68\x3d\x22\ -\x33\x22\x20\x73\x74\x72\x6f\x6b\x65\x2d\x6c\x69\x6e\x65\x63\x61\ -\x70\x3d\x22\x73\x71\x75\x61\x72\x65\x22\x20\x73\x74\x72\x6f\x6b\ -\x65\x2d\x6c\x69\x6e\x65\x6a\x6f\x69\x6e\x3d\x22\x6d\x69\x74\x65\ -\x72\x22\x2f\x3e\x3c\x70\x61\x74\x68\x20\x64\x3d\x22\x4d\x33\x32\ -\x20\x34\x32\x4c\x32\x37\x20\x33\x37\x22\x20\x73\x74\x72\x6f\x6b\ -\x65\x3d\x22\x23\x30\x30\x36\x34\x66\x66\x22\x20\x73\x74\x72\x6f\ -\x6b\x65\x2d\x77\x69\x64\x74\x68\x3d\x22\x33\x22\x20\x73\x74\x72\ -\x6f\x6b\x65\x2d\x6c\x69\x6e\x65\x63\x61\x70\x3d\x22\x73\x71\x75\ -\x61\x72\x65\x22\x20\x73\x74\x72\x6f\x6b\x65\x2d\x6c\x69\x6e\x65\ -\x6a\x6f\x69\x6e\x3d\x22\x6d\x69\x74\x65\x72\x22\x2f\x3e\x3c\x2f\ 
-\x73\x76\x67\x3e\ -\x00\x00\x03\x69\ -\x3c\ -\x3f\x78\x6d\x6c\x20\x76\x65\x72\x73\x69\x6f\x6e\x3d\x22\x31\x2e\ -\x30\x22\x20\x65\x6e\x63\x6f\x64\x69\x6e\x67\x3d\x22\x55\x54\x46\ -\x2d\x38\x22\x3f\x3e\x3c\x73\x76\x67\x20\x77\x69\x64\x74\x68\x3d\ -\x22\x33\x32\x22\x20\x68\x65\x69\x67\x68\x74\x3d\x22\x33\x32\x22\ -\x20\x76\x69\x65\x77\x42\x6f\x78\x3d\x22\x30\x20\x30\x20\x34\x38\ -\x20\x34\x38\x22\x20\x66\x69\x6c\x6c\x3d\x22\x6e\x6f\x6e\x65\x22\ -\x20\x78\x6d\x6c\x6e\x73\x3d\x22\x68\x74\x74\x70\x3a\x2f\x2f\x77\ -\x77\x77\x2e\x77\x33\x2e\x6f\x72\x67\x2f\x32\x30\x30\x30\x2f\x73\ -\x76\x67\x22\x3e\x3c\x70\x61\x74\x68\x20\x64\x3d\x22\x4d\x32\x38\ -\x2e\x32\x38\x35\x37\x20\x33\x37\x48\x33\x39\x2e\x37\x31\x34\x33\ -\x4d\x34\x32\x20\x34\x32\x4c\x33\x39\x2e\x37\x31\x34\x33\x20\x33\ -\x37\x4c\x34\x32\x20\x34\x32\x5a\x4d\x32\x36\x20\x34\x32\x4c\x32\ -\x38\x2e\x32\x38\x35\x37\x20\x33\x37\x4c\x32\x36\x20\x34\x32\x5a\ -\x4d\x32\x38\x2e\x32\x38\x35\x37\x20\x33\x37\x4c\x33\x34\x20\x32\ -\x34\x4c\x33\x39\x2e\x37\x31\x34\x33\x20\x33\x37\x48\x32\x38\x2e\ -\x32\x38\x35\x37\x5a\x22\x20\x73\x74\x72\x6f\x6b\x65\x3d\x22\x23\ -\x30\x30\x36\x34\x66\x66\x22\x20\x73\x74\x72\x6f\x6b\x65\x2d\x77\ -\x69\x64\x74\x68\x3d\x22\x33\x22\x20\x73\x74\x72\x6f\x6b\x65\x2d\ -\x6c\x69\x6e\x65\x63\x61\x70\x3d\x22\x73\x71\x75\x61\x72\x65\x22\ -\x20\x73\x74\x72\x6f\x6b\x65\x2d\x6c\x69\x6e\x65\x6a\x6f\x69\x6e\ -\x3d\x22\x6d\x69\x74\x65\x72\x22\x2f\x3e\x3c\x70\x61\x74\x68\x20\ -\x64\x3d\x22\x4d\x31\x36\x20\x36\x4c\x31\x37\x20\x39\x22\x20\x73\ -\x74\x72\x6f\x6b\x65\x3d\x22\x23\x30\x30\x36\x34\x66\x66\x22\x20\ -\x73\x74\x72\x6f\x6b\x65\x2d\x77\x69\x64\x74\x68\x3d\x22\x33\x22\ -\x20\x73\x74\x72\x6f\x6b\x65\x2d\x6c\x69\x6e\x65\x63\x61\x70\x3d\ -\x22\x73\x71\x75\x61\x72\x65\x22\x20\x73\x74\x72\x6f\x6b\x65\x2d\ -\x6c\x69\x6e\x65\x6a\x6f\x69\x6e\x3d\x22\x6d\x69\x74\x65\x72\x22\ -\x2f\x3e\x3c\x70\x61\x74\x68\x20\x64\x3d\x22\x4d\x36\x20\x31\x31\ -\x48\x32\x38\x22\x20\x73\x74\x72\x6f\x6b\x65\x3d\x22\x23\x30\x30\ -\x36\x34\x66\x66\x22\x20\x73\x74\x72\x6f\x6b\x65\x2d\x77\x69\x64\ -\x74\x68\x3d\x22\x33\x22\x20\x73\x74\x72\x6f\x6b\x65\x2d\x6c\x69\ -\x6e\x65\x63\x61\x70\x3d\x22\x73\x71\x75\x61\x72\x65\x22\x20\x73\ -\x74\x72\x6f\x6b\x65\x2d\x6c\x69\x6e\x65\x6a\x6f\x69\x6e\x3d\x22\ -\x6d\x69\x74\x65\x72\x22\x2f\x3e\x3c\x70\x61\x74\x68\x20\x64\x3d\ -\x22\x4d\x31\x30\x20\x31\x36\x43\x31\x30\x20\x31\x36\x20\x31\x31\ -\x2e\x37\x38\x39\x35\x20\x32\x32\x2e\x32\x36\x30\x39\x20\x31\x36\ -\x2e\x32\x36\x33\x32\x20\x32\x35\x2e\x37\x33\x39\x31\x43\x32\x30\ -\x2e\x37\x33\x36\x38\x20\x32\x39\x2e\x32\x31\x37\x34\x20\x32\x38\ -\x20\x33\x32\x20\x32\x38\x20\x33\x32\x22\x20\x73\x74\x72\x6f\x6b\ -\x65\x3d\x22\x23\x30\x30\x36\x34\x66\x66\x22\x20\x73\x74\x72\x6f\ -\x6b\x65\x2d\x77\x69\x64\x74\x68\x3d\x22\x33\x22\x20\x73\x74\x72\ -\x6f\x6b\x65\x2d\x6c\x69\x6e\x65\x63\x61\x70\x3d\x22\x73\x71\x75\ -\x61\x72\x65\x22\x20\x73\x74\x72\x6f\x6b\x65\x2d\x6c\x69\x6e\x65\ -\x6a\x6f\x69\x6e\x3d\x22\x6d\x69\x74\x65\x72\x22\x2f\x3e\x3c\x70\ -\x61\x74\x68\x20\x64\x3d\x22\x4d\x32\x34\x20\x31\x31\x43\x32\x34\ -\x20\x31\x31\x20\x32\x32\x2e\x32\x31\x30\x35\x20\x31\x39\x2e\x32\ -\x31\x37\x34\x20\x31\x37\x2e\x37\x33\x36\x38\x20\x32\x33\x2e\x37\ -\x38\x32\x36\x43\x31\x33\x2e\x32\x36\x33\x32\x20\x32\x38\x2e\x33\ -\x34\x37\x38\x20\x36\x20\x33\x32\x20\x36\x20\x33\x32\x22\x20\x73\ -\x74\x72\x6f\x6b\x65\x3d\x22\x23\x30\x30\x36\x34\x66\x66\x22\x20\ -\x73\x74\x72\x6f\x6b\x65\x2d\x77\x69\x64\x74\x68\x3d\x22\x33\x22\ -\x20\x73\x74\x72\x6f\x6b\x65\x2d\x6c\x69\x6e\x65\x63\x61\x70\x3d\ 
-\x22\x73\x71\x75\x61\x72\x65\x22\x20\x73\x74\x72\x6f\x6b\x65\x2d\ -\x6c\x69\x6e\x65\x6a\x6f\x69\x6e\x3d\x22\x6d\x69\x74\x65\x72\x22\ -\x2f\x3e\x3c\x2f\x73\x76\x67\x3e\ -\x00\x00\x03\x06\ -\x3c\ -\x3f\x78\x6d\x6c\x20\x76\x65\x72\x73\x69\x6f\x6e\x3d\x22\x31\x2e\ -\x30\x22\x20\x65\x6e\x63\x6f\x64\x69\x6e\x67\x3d\x22\x55\x54\x46\ -\x2d\x38\x22\x3f\x3e\x3c\x73\x76\x67\x20\x77\x69\x64\x74\x68\x3d\ -\x22\x33\x32\x22\x20\x68\x65\x69\x67\x68\x74\x3d\x22\x33\x32\x22\ -\x20\x76\x69\x65\x77\x42\x6f\x78\x3d\x22\x30\x20\x30\x20\x34\x38\ -\x20\x34\x38\x22\x20\x66\x69\x6c\x6c\x3d\x22\x6e\x6f\x6e\x65\x22\ -\x20\x78\x6d\x6c\x6e\x73\x3d\x22\x68\x74\x74\x70\x3a\x2f\x2f\x77\ -\x77\x77\x2e\x77\x33\x2e\x6f\x72\x67\x2f\x32\x30\x30\x30\x2f\x73\ -\x76\x67\x22\x3e\x3c\x70\x61\x74\x68\x20\x66\x69\x6c\x6c\x2d\x72\ -\x75\x6c\x65\x3d\x22\x65\x76\x65\x6e\x6f\x64\x64\x22\x20\x63\x6c\ -\x69\x70\x2d\x72\x75\x6c\x65\x3d\x22\x65\x76\x65\x6e\x6f\x64\x64\ -\x22\x20\x64\x3d\x22\x4d\x32\x34\x20\x34\x34\x43\x33\x35\x2e\x30\ -\x34\x35\x37\x20\x34\x34\x20\x34\x34\x20\x33\x35\x2e\x30\x34\x35\ -\x37\x20\x34\x34\x20\x32\x34\x43\x34\x34\x20\x31\x32\x2e\x39\x35\ -\x34\x33\x20\x33\x35\x2e\x30\x34\x35\x37\x20\x34\x20\x32\x34\x20\ -\x34\x43\x31\x32\x2e\x39\x35\x34\x33\x20\x34\x20\x34\x20\x31\x32\ -\x2e\x39\x35\x34\x33\x20\x34\x20\x32\x34\x43\x34\x20\x33\x35\x2e\ -\x30\x34\x35\x37\x20\x31\x32\x2e\x39\x35\x34\x33\x20\x34\x34\x20\ -\x32\x34\x20\x34\x34\x5a\x22\x20\x73\x74\x72\x6f\x6b\x65\x3d\x22\ -\x23\x30\x30\x36\x34\x66\x66\x22\x20\x73\x74\x72\x6f\x6b\x65\x2d\ -\x77\x69\x64\x74\x68\x3d\x22\x33\x22\x20\x73\x74\x72\x6f\x6b\x65\ -\x2d\x6c\x69\x6e\x65\x63\x61\x70\x3d\x22\x73\x71\x75\x61\x72\x65\ -\x22\x20\x73\x74\x72\x6f\x6b\x65\x2d\x6c\x69\x6e\x65\x6a\x6f\x69\ -\x6e\x3d\x22\x6d\x69\x74\x65\x72\x22\x2f\x3e\x3c\x70\x61\x74\x68\ -\x20\x64\x3d\x22\x4d\x32\x34\x20\x32\x33\x43\x32\x36\x2e\x37\x36\ -\x31\x34\x20\x32\x33\x20\x32\x39\x20\x32\x30\x2e\x37\x36\x31\x34\ -\x20\x32\x39\x20\x31\x38\x43\x32\x39\x20\x31\x35\x2e\x32\x33\x38\ -\x36\x20\x32\x36\x2e\x37\x36\x31\x34\x20\x31\x33\x20\x32\x34\x20\ -\x31\x33\x43\x32\x31\x2e\x32\x33\x38\x36\x20\x31\x33\x20\x31\x39\ -\x20\x31\x35\x2e\x32\x33\x38\x36\x20\x31\x39\x20\x31\x38\x43\x31\ -\x39\x20\x32\x30\x2e\x37\x36\x31\x34\x20\x32\x31\x2e\x32\x33\x38\ -\x36\x20\x32\x33\x20\x32\x34\x20\x32\x33\x5a\x22\x20\x66\x69\x6c\ -\x6c\x3d\x22\x6e\x6f\x6e\x65\x22\x20\x73\x74\x72\x6f\x6b\x65\x3d\ -\x22\x23\x30\x30\x36\x34\x66\x66\x22\x20\x73\x74\x72\x6f\x6b\x65\ -\x2d\x77\x69\x64\x74\x68\x3d\x22\x33\x22\x20\x73\x74\x72\x6f\x6b\ -\x65\x2d\x6c\x69\x6e\x65\x6a\x6f\x69\x6e\x3d\x22\x6d\x69\x74\x65\ -\x72\x22\x2f\x3e\x3c\x70\x61\x74\x68\x20\x64\x3d\x22\x4d\x31\x30\ -\x2e\x30\x32\x32\x20\x33\x38\x2e\x33\x33\x32\x43\x31\x30\x2e\x33\ -\x36\x35\x37\x20\x33\x33\x2e\x31\x32\x30\x36\x20\x31\x34\x2e\x37\ -\x30\x31\x36\x20\x32\x39\x20\x32\x30\x20\x32\x39\x48\x32\x38\x43\ -\x33\x33\x2e\x32\x39\x31\x34\x20\x32\x39\x20\x33\x37\x2e\x36\x32\ -\x32\x39\x20\x33\x33\x2e\x31\x30\x39\x37\x20\x33\x37\x2e\x39\x37\ -\x36\x37\x20\x33\x38\x2e\x33\x31\x31\x33\x22\x20\x73\x74\x72\x6f\ -\x6b\x65\x3d\x22\x23\x30\x30\x36\x34\x66\x66\x22\x20\x73\x74\x72\ -\x6f\x6b\x65\x2d\x77\x69\x64\x74\x68\x3d\x22\x33\x22\x20\x73\x74\ -\x72\x6f\x6b\x65\x2d\x6c\x69\x6e\x65\x63\x61\x70\x3d\x22\x73\x71\ -\x75\x61\x72\x65\x22\x20\x73\x74\x72\x6f\x6b\x65\x2d\x6c\x69\x6e\ -\x65\x6a\x6f\x69\x6e\x3d\x22\x6d\x69\x74\x65\x72\x22\x2f\x3e\x3c\ -\x2f\x73\x76\x67\x3e\ " qt_resource_name = b"\ @@ -1904,58 +1960,60 @@ qt_resource_name = b"\ \x00\x6f\xa6\x53\ \x00\x69\ 
\x00\x63\x00\x6f\x00\x6e\x00\x73\ -\x00\x0b\ -\x03\xe5\x96\xa7\ -\x4f\xdd\ -\x5b\x58\x00\x5f\x00\x73\x00\x61\x00\x76\x00\x65\x00\x2e\x00\x73\x00\x76\x00\x67\ \x00\x10\ -\x00\xa1\xff\xa7\ -\x51\x73\ -\x95\xed\x00\x5f\x00\x63\x00\x6c\x00\x6f\x00\x73\x00\x65\x00\x2d\x00\x6f\x00\x6e\x00\x65\x00\x2e\x00\x73\x00\x76\x00\x67\ -\x00\x12\ -\x08\x89\x84\xe7\ -\x52\x17\ -\x88\x68\x00\x5f\x00\x6c\x00\x69\x00\x73\x00\x74\x00\x2d\x00\x6d\x00\x69\x00\x64\x00\x64\x00\x6c\x00\x65\x00\x2e\x00\x73\x00\x76\ -\x00\x67\ -\x00\x15\ -\x0c\x16\xe9\x47\ -\x65\x87\ -\x4e\xf6\x59\x39\x00\x2d\x5f\x00\x00\x5f\x00\x66\x00\x6f\x00\x6c\x00\x64\x00\x65\x00\x72\x00\x2d\x00\x6f\x00\x70\x00\x65\x00\x6e\ -\x00\x2e\x00\x73\x00\x76\x00\x67\ -\x00\x0c\ -\x0d\x33\x25\x07\ -\x4e\x0a\ -\x4e\x00\x6b\x65\x00\x5f\x00\x62\x00\x61\x00\x63\x00\x6b\x00\x2e\x00\x73\x00\x76\x00\x67\ +\x0d\xe5\xef\x87\ +\x68\x21\ +\x9a\x8c\x00\x5f\x00\x63\x00\x68\x00\x65\x00\x63\x00\x6b\x00\x2d\x00\x6f\x00\x6e\x00\x65\x00\x2e\x00\x73\x00\x76\x00\x67\ \x00\x15\ \x0b\x6d\xc7\x27\ \x7f\x16\ \x8f\x91\x65\x87\x4e\xf6\x00\x5f\x00\x66\x00\x69\x00\x6c\x00\x65\x00\x2d\x00\x65\x00\x64\x00\x69\x00\x74\x00\x69\x00\x6e\x00\x67\ \x00\x2e\x00\x73\x00\x76\x00\x67\ -\x00\x0d\ -\x00\x58\x57\x5f\ -\x00\x4d\ -\x00\x5f\x00\x46\x00\x61\x00\x76\x00\x69\x00\x63\x00\x6f\x00\x6e\x00\x2e\x00\x69\x00\x63\x00\x6f\ +\x00\x11\ +\x0c\xda\xf9\x67\ +\x00\x6c\ +\x00\x61\x00\x62\x00\x65\x00\x6c\x00\x6d\x00\x65\x00\x5f\x00\x33\x00\x32\x00\x78\x00\x33\x00\x32\x00\x2e\x00\x70\x00\x6e\x00\x67\ +\ +\x00\x0b\ +\x05\x9c\x97\xc7\ +\x52\x17\ +\x88\x68\x00\x5f\x00\x6c\x00\x69\x00\x73\x00\x74\x00\x2e\x00\x73\x00\x76\x00\x67\ +\x00\x0b\ +\x03\xe5\x96\xa7\ +\x4f\xdd\ +\x5b\x58\x00\x5f\x00\x73\x00\x61\x00\x76\x00\x65\x00\x2e\x00\x73\x00\x76\x00\x67\ +\x00\x12\ +\x08\x89\x84\xe7\ +\x52\x17\ +\x88\x68\x00\x5f\x00\x6c\x00\x69\x00\x73\x00\x74\x00\x2d\x00\x6d\x00\x69\x00\x64\x00\x64\x00\x6c\x00\x65\x00\x2e\x00\x73\x00\x76\ +\x00\x67\ +\x00\x16\ +\x04\xf6\x11\xe7\ +\x4f\x20\ +\x51\xfa\x00\x33\x00\x5f\x00\x65\x00\x66\x00\x66\x00\x65\x00\x72\x00\x65\x00\x6e\x00\x74\x00\x2d\x00\x74\x00\x68\x00\x72\x00\x65\ +\x00\x65\x00\x2e\x00\x73\x00\x76\x00\x67\ +\x00\x11\ +\x05\xa5\x86\x27\ +\x52\x20\ +\x96\x64\x00\x5f\x00\x64\x00\x65\x00\x6c\x00\x65\x00\x74\x00\x65\x00\x2d\x00\x74\x00\x77\x00\x6f\x00\x2e\x00\x73\x00\x76\x00\x67\ +\ +\x00\x20\ +\x0d\x45\x1a\xe7\ +\x8f\x6c\ +\x63\x62\x65\x87\x4e\xf6\x59\x39\x00\x31\x00\x5f\x00\x66\x00\x6f\x00\x6c\x00\x64\x00\x65\x00\x72\x00\x2d\x00\x63\x00\x6f\x00\x6e\ +\x00\x76\x00\x65\x00\x72\x00\x73\x00\x69\x00\x6f\x00\x6e\x00\x2d\x00\x6f\x00\x6e\x00\x65\x00\x2e\x00\x73\x00\x76\x00\x67\ \x00\x10\ -\x0d\xe5\xef\x87\ -\x68\x21\ -\x9a\x8c\x00\x5f\x00\x63\x00\x68\x00\x65\x00\x63\x00\x6b\x00\x2d\x00\x6f\x00\x6e\x00\x65\x00\x2e\x00\x73\x00\x76\x00\x67\ -\x00\x0d\ -\x04\x8d\xa3\x67\ -\x95\x1a\ -\x70\xb9\x00\x5f\x00\x61\x00\x6e\x00\x63\x00\x68\x00\x6f\x00\x72\x00\x2e\x00\x73\x00\x76\x00\x67\ -\x00\x0a\ -\x09\xe7\x20\x07\ -\x71\x67\ -\x72\x47\x00\x5f\x00\x70\x00\x69\x00\x63\x00\x2e\x00\x73\x00\x76\x00\x67\ +\x0d\x82\xeb\x67\ +\x7f\xfb\ +\x8b\xd1\x00\x5f\x00\x74\x00\x72\x00\x61\x00\x6e\x00\x73\x00\x6c\x00\x61\x00\x74\x00\x65\x00\x2e\x00\x73\x00\x76\x00\x67\ \x00\x0d\ \x0d\x17\x1b\x07\ \x00\x56\ \x00\x4f\x00\x43\x00\x5f\x00\x33\x00\x32\x00\x78\x00\x33\x00\x32\x00\x2e\x00\x70\x00\x6e\x00\x67\ -\x00\x12\ -\x01\xf3\xd8\xa7\ -\x8b\xbe\ -\x7f\x6e\x00\x5f\x00\x73\x00\x65\x00\x74\x00\x74\x00\x69\x00\x6e\x00\x67\x00\x2d\x00\x74\x00\x77\x00\x6f\x00\x2e\x00\x73\x00\x76\ -\x00\x67\ +\x00\x0a\ +\x09\xe7\x20\x07\ 
+\x71\x67\ +\x72\x47\x00\x5f\x00\x70\x00\x69\x00\x63\x00\x2e\x00\x73\x00\x76\x00\x67\ \x00\x11\ \x04\x4f\x8c\xc7\ \x53\xbb\ @@ -1966,20 +2024,77 @@ qt_resource_name = b"\ \x4f\xdd\ \x5b\x58\x78\x6c\x76\xd8\x00\x5f\x00\x73\x00\x61\x00\x76\x00\x65\x00\x2d\x00\x6f\x00\x6e\x00\x65\x00\x2e\x00\x73\x00\x76\x00\x67\ \ -\x00\x11\ -\x0c\xda\xf9\x67\ -\x00\x6c\ -\x00\x61\x00\x62\x00\x65\x00\x6c\x00\x6d\x00\x65\x00\x5f\x00\x33\x00\x32\x00\x78\x00\x33\x00\x32\x00\x2e\x00\x70\x00\x6e\x00\x67\ -\ \x00\x0c\ \x06\x3a\xda\x87\ \x4e\x0b\ \x4e\x00\x6b\x65\x00\x5f\x00\x6e\x00\x65\x00\x78\x00\x74\x00\x2e\x00\x73\x00\x76\x00\x67\ -\x00\x11\ -\x05\xa5\x86\x27\ -\x52\x20\ -\x96\x64\x00\x5f\x00\x64\x00\x65\x00\x6c\x00\x65\x00\x74\x00\x65\x00\x2d\x00\x74\x00\x77\x00\x6f\x00\x2e\x00\x73\x00\x76\x00\x67\ -\ +\x00\x08\ +\x05\xa2\x42\xdf\ +\x00\x63\ +\x00\x6f\x00\x63\x00\x6f\x00\x2e\x00\x69\x00\x63\x00\x6f\ +\x00\x12\ +\x01\xf3\xd8\xa7\ +\x8b\xbe\ +\x7f\x6e\x00\x5f\x00\x73\x00\x65\x00\x74\x00\x74\x00\x69\x00\x6e\x00\x67\x00\x2d\x00\x74\x00\x77\x00\x6f\x00\x2e\x00\x73\x00\x76\ +\x00\x67\ +\x00\x1d\ +\x05\xba\x83\xc7\ +\x8f\x6c\ +\x63\x62\x65\x87\x4e\xf6\x00\x31\x00\x5f\x00\x66\x00\x69\x00\x6c\x00\x65\x00\x2d\x00\x63\x00\x6f\x00\x6e\x00\x76\x00\x65\x00\x72\ +\x00\x73\x00\x69\x00\x6f\x00\x6e\x00\x2d\x00\x6f\x00\x6e\x00\x65\x00\x2e\x00\x73\x00\x76\x00\x67\ +\x00\x0c\ +\x0f\x37\x8d\x07\ +\x5f\x00\ +\x51\x73\x00\x5f\x00\x70\x00\x6f\x00\x77\x00\x65\x00\x72\x00\x2e\x00\x73\x00\x76\x00\x67\ +\x00\x0f\ +\x0a\xf8\x43\x87\ +\x7f\x29\ +\x5c\x0f\x00\x5f\x00\x7a\x00\x6f\x00\x6f\x00\x6d\x00\x2d\x00\x6f\x00\x75\x00\x74\x00\x2e\x00\x73\x00\x76\x00\x67\ +\x00\x0b\ +\x02\xa2\x70\xa7\ +\x77\x3c\ +\x77\x5b\x00\x5f\x00\x65\x00\x79\x00\x65\x00\x73\x00\x2e\x00\x73\x00\x76\x00\x67\ +\x00\x0c\ +\x0d\x33\x25\x07\ +\x4e\x0a\ +\x4e\x00\x6b\x65\x00\x5f\x00\x62\x00\x61\x00\x63\x00\x6b\x00\x2e\x00\x73\x00\x76\x00\x67\ +\x00\x0c\ +\x0a\x9f\x02\x47\ +\x00\x69\ +\x00\x6e\x00\x73\x00\x74\x00\x61\x00\x6e\x00\x63\x00\x65\x00\x2e\x00\x70\x00\x6e\x00\x67\ +\x00\x0d\ +\x04\x8d\xa3\x67\ +\x95\x1a\ +\x70\xb9\x00\x5f\x00\x61\x00\x6e\x00\x63\x00\x68\x00\x6f\x00\x72\x00\x2e\x00\x73\x00\x76\x00\x67\ +\x00\x0b\ +\x0b\x19\xb3\x87\ +\x7f\x16\ +\x8f\x91\x00\x5f\x00\x65\x00\x64\x00\x69\x00\x74\x00\x2e\x00\x73\x00\x76\x00\x67\ +\x00\x10\ +\x00\xa1\xff\xa7\ +\x51\x73\ +\x95\xed\x00\x5f\x00\x63\x00\x6c\x00\x6f\x00\x73\x00\x65\x00\x2d\x00\x6f\x00\x6e\x00\x65\x00\x2e\x00\x73\x00\x76\x00\x67\ +\x00\x09\ +\x06\xe6\xa9\x87\ +\x62\x11\ +\x76\x84\x00\x5f\x00\x6d\x00\x65\x00\x2e\x00\x73\x00\x76\x00\x67\ +\x00\x15\ +\x0c\x16\xe9\x47\ +\x65\x87\ +\x4e\xf6\x59\x39\x00\x2d\x5f\x00\x00\x5f\x00\x66\x00\x6f\x00\x6c\x00\x64\x00\x65\x00\x72\x00\x2d\x00\x6f\x00\x70\x00\x65\x00\x6e\ +\x00\x2e\x00\x73\x00\x76\x00\x67\ +\x00\x0d\ +\x00\x58\x57\x5f\ +\x00\x4d\ +\x00\x5f\x00\x46\x00\x61\x00\x76\x00\x69\x00\x63\x00\x6f\x00\x6e\x00\x2e\x00\x69\x00\x63\x00\x6f\ +\x00\x0c\ +\x05\xce\x27\xe7\ +\x00\x73\ +\x00\x65\x00\x6d\x00\x61\x00\x6e\x00\x74\x00\x69\x00\x63\x00\x2e\x00\x70\x00\x6e\x00\x67\ +\x00\x0e\ +\x09\x39\xda\xe7\ +\x65\x3e\ +\x59\x27\x00\x5f\x00\x7a\x00\x6f\x00\x6f\x00\x6d\x00\x2d\x00\x69\x00\x6e\x00\x2e\x00\x73\x00\x76\x00\x67\ \x00\x0d\ \x01\xee\x64\x87\ \x52\x20\ @@ -1989,121 +2104,67 @@ qt_resource_name = b"\ \x95\x2e\ \x76\xd8\x00\x5f\x00\x6b\x00\x65\x00\x79\x00\x62\x00\x6f\x00\x61\x00\x72\x00\x64\x00\x2d\x00\x6f\x00\x6e\x00\x65\x00\x2e\x00\x73\ \x00\x76\x00\x67\ -\x00\x12\ -\x01\xac\xf2\xe7\ -\x4e\x00\ 
-\x5b\xf9\x4e\x00\x00\x5f\x00\x6f\x00\x6e\x00\x65\x00\x2d\x00\x74\x00\x6f\x00\x2d\x00\x6f\x00\x6e\x00\x65\x00\x2e\x00\x73\x00\x76\ -\x00\x67\ -\x00\x1d\ -\x05\xba\x83\xc7\ -\x8f\x6c\ -\x63\x62\x65\x87\x4e\xf6\x00\x31\x00\x5f\x00\x66\x00\x69\x00\x6c\x00\x65\x00\x2d\x00\x63\x00\x6f\x00\x6e\x00\x76\x00\x65\x00\x72\ -\x00\x73\x00\x69\x00\x6f\x00\x6e\x00\x2d\x00\x6f\x00\x6e\x00\x65\x00\x2e\x00\x73\x00\x76\x00\x67\ -\x00\x0f\ -\x0a\xf8\x43\x87\ -\x7f\x29\ -\x5c\x0f\x00\x5f\x00\x7a\x00\x6f\x00\x6f\x00\x6d\x00\x2d\x00\x6f\x00\x75\x00\x74\x00\x2e\x00\x73\x00\x76\x00\x67\ +\x00\x10\ +\x05\x64\x2c\x67\ +\x51\x68\ +\x5b\xbd\x00\x5f\x00\x66\x00\x75\x00\x6c\x00\x6c\x00\x77\x00\x69\x00\x64\x00\x74\x00\x68\x00\x2e\x00\x73\x00\x76\x00\x67\ \x00\x16\ \x07\xf2\xb9\xe7\ \x4f\x20\ \x51\x65\x00\x33\x00\x5f\x00\x61\x00\x66\x00\x66\x00\x65\x00\x72\x00\x65\x00\x6e\x00\x74\x00\x2d\x00\x74\x00\x68\x00\x72\x00\x65\ \x00\x65\x00\x2e\x00\x73\x00\x76\x00\x67\ -\x00\x16\ -\x04\xf6\x11\xe7\ -\x4f\x20\ -\x51\xfa\x00\x33\x00\x5f\x00\x65\x00\x66\x00\x66\x00\x65\x00\x72\x00\x65\x00\x6e\x00\x74\x00\x2d\x00\x74\x00\x68\x00\x72\x00\x65\ -\x00\x65\x00\x2e\x00\x73\x00\x76\x00\x67\ -\x00\x0c\ -\x0a\x9f\x02\x47\ -\x00\x69\ -\x00\x6e\x00\x73\x00\x74\x00\x61\x00\x6e\x00\x63\x00\x65\x00\x2e\x00\x70\x00\x6e\x00\x67\ -\x00\x10\ -\x05\x64\x2c\x67\ -\x51\x68\ -\x5b\xbd\x00\x5f\x00\x66\x00\x75\x00\x6c\x00\x6c\x00\x77\x00\x69\x00\x64\x00\x74\x00\x68\x00\x2e\x00\x73\x00\x76\x00\x67\ -\x00\x0b\ -\x0b\x19\xb3\x87\ -\x7f\x16\ -\x8f\x91\x00\x5f\x00\x65\x00\x64\x00\x69\x00\x74\x00\x2e\x00\x73\x00\x76\x00\x67\ -\x00\x0b\ -\x05\x9c\x97\xc7\ -\x52\x17\ -\x88\x68\x00\x5f\x00\x6c\x00\x69\x00\x73\x00\x74\x00\x2e\x00\x73\x00\x76\x00\x67\ -\x00\x0c\ -\x0f\x37\x8d\x07\ -\x5f\x00\ -\x51\x73\x00\x5f\x00\x70\x00\x6f\x00\x77\x00\x65\x00\x72\x00\x2e\x00\x73\x00\x76\x00\x67\ \x00\x0e\ \x02\xb9\x42\x87\ \x53\xbb\ \x98\x76\x90\xe8\x00\x5f\x00\x74\x00\x6f\x00\x2d\x00\x74\x00\x6f\x00\x70\x00\x2e\x00\x73\x00\x76\x00\x67\ -\x00\x08\ -\x05\xa2\x42\xdf\ -\x00\x63\ -\x00\x6f\x00\x63\x00\x6f\x00\x2e\x00\x69\x00\x63\x00\x6f\ -\x00\x0c\ -\x05\xce\x27\xe7\ -\x00\x73\ -\x00\x65\x00\x6d\x00\x61\x00\x6e\x00\x74\x00\x69\x00\x63\x00\x2e\x00\x70\x00\x6e\x00\x67\ -\x00\x0e\ -\x09\x39\xda\xe7\ -\x65\x3e\ -\x59\x27\x00\x5f\x00\x7a\x00\x6f\x00\x6f\x00\x6d\x00\x2d\x00\x69\x00\x6e\x00\x2e\x00\x73\x00\x76\x00\x67\ -\x00\x20\ -\x0d\x45\x1a\xe7\ -\x8f\x6c\ -\x63\x62\x65\x87\x4e\xf6\x59\x39\x00\x31\x00\x5f\x00\x66\x00\x6f\x00\x6c\x00\x64\x00\x65\x00\x72\x00\x2d\x00\x63\x00\x6f\x00\x6e\ -\x00\x76\x00\x65\x00\x72\x00\x73\x00\x69\x00\x6f\x00\x6e\x00\x2d\x00\x6f\x00\x6e\x00\x65\x00\x2e\x00\x73\x00\x76\x00\x67\ -\x00\x10\ -\x0d\x82\xeb\x67\ -\x7f\xfb\ -\x8b\xd1\x00\x5f\x00\x74\x00\x72\x00\x61\x00\x6e\x00\x73\x00\x6c\x00\x61\x00\x74\x00\x65\x00\x2e\x00\x73\x00\x76\x00\x67\ -\x00\x09\ -\x06\xe6\xa9\x87\ -\x62\x11\ -\x76\x84\x00\x5f\x00\x6d\x00\x65\x00\x2e\x00\x73\x00\x76\x00\x67\ +\x00\x12\ +\x01\xac\xf2\xe7\ +\x4e\x00\ +\x5b\xf9\x4e\x00\x00\x5f\x00\x6f\x00\x6e\x00\x65\x00\x2d\x00\x74\x00\x6f\x00\x2d\x00\x6f\x00\x6e\x00\x65\x00\x2e\x00\x73\x00\x76\ +\x00\x67\ " qt_resource_struct_v1 = b"\ \x00\x00\x00\x00\x00\x02\x00\x00\x00\x01\x00\x00\x00\x01\ \x00\x00\x00\x00\x00\x02\x00\x00\x00\x01\x00\x00\x00\x02\ -\x00\x00\x00\x0e\x00\x02\x00\x00\x00\x24\x00\x00\x00\x03\ -\x00\x00\x01\x08\x00\x01\x00\x00\x00\x01\x00\x00\x10\x10\ -\x00\x00\x00\x3a\x00\x00\x00\x00\x00\x01\x00\x00\x03\xce\ -\x00\x00\x02\xdc\x00\x00\x00\x00\x00\x01\x00\x00\x42\xc3\ -\x00\x00\x02\x90\x00\x00\x00\x00\x00\x01\x00\x00\x3c\x44\ 
-\x00\x00\x01\xa8\x00\x00\x00\x00\x00\x01\x00\x00\x24\x51\ -\x00\x00\x04\x68\x00\x00\x00\x00\x00\x01\x00\x00\x5b\x0e\ -\x00\x00\x01\xfa\x00\x00\x00\x00\x00\x01\x00\x00\x2b\xce\ +\x00\x00\x00\x0e\x00\x02\x00\x00\x00\x25\x00\x00\x00\x03\ +\x00\x00\x04\x30\x00\x01\x00\x00\x00\x01\x00\x00\x59\x8d\ +\x00\x00\x03\xc2\x00\x00\x00\x00\x00\x01\x00\x00\x52\x6d\ +\x00\x00\x05\x56\x00\x00\x00\x00\x00\x01\x00\x00\x71\x5a\ +\x00\x00\x04\x90\x00\x00\x00\x00\x00\x01\x00\x00\x65\x99\ +\x00\x00\x02\x82\x00\x00\x00\x00\x00\x01\x00\x00\x39\x83\ +\x00\x00\x03\x2e\x00\x00\x00\x00\x00\x01\x00\x00\x47\x36\ +\x00\x00\x05\x34\x00\x00\x00\x00\x00\x01\x00\x00\x6f\x7f\ +\x00\x00\x02\x26\x00\x00\x00\x00\x00\x01\x00\x00\x30\xa5\ +\x00\x00\x00\xb8\x00\x00\x00\x00\x00\x01\x00\x00\x12\x13\ +\x00\x00\x01\xfe\x00\x00\x00\x00\x00\x01\x00\x00\x2e\xcf\ +\x00\x00\x03\x86\x00\x01\x00\x00\x00\x01\x00\x00\x4f\xdb\ +\x00\x00\x00\xfe\x00\x00\x00\x00\x00\x01\x00\x00\x19\xd3\ +\x00\x00\x04\xdc\x00\x00\x00\x00\x00\x01\x00\x00\x6c\x18\ +\x00\x00\x00\x9c\x00\x00\x00\x00\x00\x01\x00\x00\x0f\x06\ +\x00\x00\x02\x6c\x00\x00\x00\x00\x00\x01\x00\x00\x35\x01\ +\x00\x00\x01\x30\x00\x00\x00\x00\x00\x01\x00\x00\x1c\x02\ +\x00\x00\x02\xac\x00\x00\x00\x00\x00\x01\x00\x00\x3f\x2a\ +\x00\x00\x04\x50\x00\x00\x00\x00\x00\x01\x00\x00\x5f\xb9\ +\x00\x00\x02\x4e\x00\x00\x00\x00\x00\x01\x00\x00\x33\x1a\ +\x00\x00\x03\xe8\x00\x00\x00\x00\x00\x01\x00\x00\x54\xb9\ +\x00\x00\x05\x02\x00\x00\x00\x00\x00\x01\x00\x00\x6d\x4f\ +\x00\x00\x00\xd4\x00\x00\x00\x00\x00\x01\x00\x00\x15\xe1\ +\x00\x00\x04\x6e\x00\x00\x00\x00\x00\x01\x00\x00\x62\xec\ +\x00\x00\x01\xe4\x00\x00\x00\x00\x00\x01\x00\x00\x2b\x88\ +\x00\x00\x03\x68\x00\x00\x00\x00\x00\x01\x00\x00\x4c\x80\ +\x00\x00\x03\x0a\x00\x00\x00\x00\x00\x01\x00\x00\x44\xfd\ +\x00\x00\x03\xa6\x00\x00\x00\x00\x00\x01\x00\x00\x50\xef\ +\x00\x00\x04\xb0\x00\x00\x00\x00\x00\x01\x00\x00\x68\x32\ +\x00\x00\x00\x44\x00\x00\x00\x00\x00\x01\x00\x00\x02\x82\ +\x00\x00\x04\x00\x00\x00\x00\x00\x00\x01\x00\x00\x57\xc3\ +\x00\x00\x00\x74\x00\x00\x00\x00\x00\x01\x00\x00\x04\xd4\ +\x00\x00\x01\xc4\x00\x00\x00\x00\x00\x01\x00\x00\x24\x50\ +\x00\x00\x03\x4a\x00\x00\x00\x00\x00\x01\x00\x00\x4a\x98\ +\x00\x00\x01\x58\x00\x00\x00\x00\x00\x01\x00\x00\x1d\xea\ +\x00\x00\x01\x9e\x00\x00\x00\x00\x00\x01\x00\x00\x20\xe3\ \x00\x00\x00\x1e\x00\x00\x00\x00\x00\x01\x00\x00\x00\x00\ -\x00\x00\x01\xd2\x00\x00\x00\x00\x00\x01\x00\x00\x29\xf8\ -\x00\x00\x01\x4e\x00\x01\x00\x00\x00\x01\x00\x00\x18\xbe\ -\x00\x00\x03\x9c\x00\x00\x00\x00\x00\x01\x00\x00\x4d\x31\ -\x00\x00\x03\xec\x00\x00\x00\x00\x00\x01\x00\x00\x52\xbb\ -\x00\x00\x04\x2e\x00\x00\x00\x00\x00\x01\x00\x00\x55\x70\ -\x00\x00\x04\x8a\x00\x00\x00\x00\x00\x01\x00\x00\x5c\xe9\ -\x00\x00\x02\x68\x00\x00\x00\x00\x00\x01\x00\x00\x3a\x5c\ -\x00\x00\x03\x06\x00\x00\x00\x00\x00\x01\x00\x00\x45\x86\ -\x00\x00\x04\xa0\x00\x00\x00\x00\x00\x01\x00\x00\x61\x6b\ -\x00\x00\x02\x4a\x00\x00\x00\x00\x00\x01\x00\x00\x38\x75\ -\x00\x00\x05\x4c\x00\x00\x00\x00\x00\x01\x00\x00\x6d\xb1\ -\x00\x00\x03\x6a\x00\x00\x00\x00\x00\x01\x00\x00\x4b\x01\ -\x00\x00\x00\x60\x00\x00\x00\x00\x00\x01\x00\x00\x06\x1a\ -\x00\x00\x04\xbe\x00\x00\x00\x00\x00\x01\x00\x00\x64\x9e\ -\x00\x00\x01\x6e\x00\x00\x00\x00\x00\x01\x00\x00\x19\xd2\ -\x00\x00\x03\xce\x00\x00\x00\x00\x00\x01\x00\x00\x4f\x60\ -\x00\x00\x03\x46\x00\x00\x00\x00\x00\x01\x00\x00\x48\xc8\ -\x00\x00\x04\x12\x00\x00\x00\x00\x00\x01\x00\x00\x53\xf2\ -\x00\x00\x02\xb0\x00\x00\x00\x00\x00\x01\x00\x00\x3e\xdd\ -\x00\x00\x00\xd8\x00\x00\x00\x00\x00\x01\x00\x00\x0d\xbe\ 
-\x00\x00\x00\x8a\x00\x00\x00\x00\x00\x01\x00\x00\x0a\x0c\ -\x00\x00\x02\x22\x00\x00\x00\x00\x00\x01\x00\x00\x2e\x43\ -\x00\x00\x01\x88\x00\x00\x00\x00\x00\x01\x00\x00\x1d\x19\ -\x00\x00\x00\xba\x00\x00\x00\x00\x00\x01\x00\x00\x0b\xd6\ -\x00\x00\x04\xe0\x00\x00\x00\x00\x00\x01\x00\x00\x67\x4b\ -\x00\x00\x05\x26\x00\x00\x00\x00\x00\x01\x00\x00\x6a\x44\ -\x00\x00\x01\x28\x00\x00\x00\x00\x00\x01\x00\x00\x16\x3c\ -\x00\x00\x04\x4a\x00\x00\x00\x00\x00\x01\x00\x00\x58\x7d\ +\x00\x00\x02\xec\x00\x00\x00\x00\x00\x01\x00\x00\x42\x6c\ " qt_resource_struct_v2 = b"\ @@ -2111,79 +2172,81 @@ qt_resource_struct_v2 = b"\ \x00\x00\x00\x00\x00\x00\x00\x00\ \x00\x00\x00\x00\x00\x02\x00\x00\x00\x01\x00\x00\x00\x02\ \x00\x00\x00\x00\x00\x00\x00\x00\ -\x00\x00\x00\x0e\x00\x02\x00\x00\x00\x24\x00\x00\x00\x03\ +\x00\x00\x00\x0e\x00\x02\x00\x00\x00\x25\x00\x00\x00\x03\ \x00\x00\x00\x00\x00\x00\x00\x00\ -\x00\x00\x01\x08\x00\x01\x00\x00\x00\x01\x00\x00\x10\x10\ +\x00\x00\x04\x30\x00\x01\x00\x00\x00\x01\x00\x00\x59\x8d\ \x00\x00\x01\x88\xad\x7f\xe2\x75\ -\x00\x00\x00\x3a\x00\x00\x00\x00\x00\x01\x00\x00\x03\xce\ +\x00\x00\x03\xc2\x00\x00\x00\x00\x00\x01\x00\x00\x52\x6d\ \x00\x00\x01\x88\xad\x7f\xe2\x75\ -\x00\x00\x02\xdc\x00\x00\x00\x00\x00\x01\x00\x00\x42\xc3\ +\x00\x00\x05\x56\x00\x00\x00\x00\x00\x01\x00\x00\x71\x5a\ \x00\x00\x01\x88\xad\x7f\xe2\x75\ -\x00\x00\x02\x90\x00\x00\x00\x00\x00\x01\x00\x00\x3c\x44\ +\x00\x00\x04\x90\x00\x00\x00\x00\x00\x01\x00\x00\x65\x99\ \x00\x00\x01\x88\xad\x7f\xe2\x79\ -\x00\x00\x01\xa8\x00\x00\x00\x00\x00\x01\x00\x00\x24\x51\ +\x00\x00\x02\x82\x00\x00\x00\x00\x00\x01\x00\x00\x39\x83\ \x00\x00\x01\x88\xad\x7f\xe2\x79\ -\x00\x00\x04\x68\x00\x00\x00\x00\x00\x01\x00\x00\x5b\x0e\ +\x00\x00\x03\x2e\x00\x00\x00\x00\x00\x01\x00\x00\x47\x36\ +\x00\x00\x01\x89\x2e\x5c\xdb\x84\ +\x00\x00\x05\x34\x00\x00\x00\x00\x00\x01\x00\x00\x6f\x7f\ \x00\x00\x01\x88\xad\x7f\xe2\x79\ -\x00\x00\x01\xfa\x00\x00\x00\x00\x00\x01\x00\x00\x2b\xce\ +\x00\x00\x02\x26\x00\x00\x00\x00\x00\x01\x00\x00\x30\xa5\ \x00\x00\x01\x88\xad\x7f\xe2\x75\ +\x00\x00\x00\xb8\x00\x00\x00\x00\x00\x01\x00\x00\x12\x13\ +\x00\x00\x01\x88\xad\x7f\xe2\x75\ +\x00\x00\x01\xfe\x00\x00\x00\x00\x00\x01\x00\x00\x2e\xcf\ +\x00\x00\x01\x88\xad\x7f\xe2\x79\ +\x00\x00\x03\x86\x00\x01\x00\x00\x00\x01\x00\x00\x4f\xdb\ +\x00\x00\x01\x88\xad\x7f\xe2\x79\ +\x00\x00\x00\xfe\x00\x00\x00\x00\x00\x01\x00\x00\x19\xd3\ +\x00\x00\x01\x88\xad\x7f\xe2\x75\ +\x00\x00\x04\xdc\x00\x00\x00\x00\x00\x01\x00\x00\x6c\x18\ +\x00\x00\x01\x88\xad\x7f\xe2\x75\ +\x00\x00\x00\x9c\x00\x00\x00\x00\x00\x01\x00\x00\x0f\x06\ +\x00\x00\x01\x88\xad\x7f\xe2\x75\ +\x00\x00\x02\x6c\x00\x00\x00\x00\x00\x01\x00\x00\x35\x01\ +\x00\x00\x01\x88\xb2\x73\x0b\xe3\ +\x00\x00\x01\x30\x00\x00\x00\x00\x00\x01\x00\x00\x1c\x02\ +\x00\x00\x01\x88\xad\x7f\xe2\x75\ +\x00\x00\x02\xac\x00\x00\x00\x00\x00\x01\x00\x00\x3f\x2a\ +\x00\x00\x01\x88\xad\x7f\xe2\x79\ +\x00\x00\x04\x50\x00\x00\x00\x00\x00\x01\x00\x00\x5f\xb9\ +\x00\x00\x01\x88\xad\x7f\xe2\x75\ +\x00\x00\x02\x4e\x00\x00\x00\x00\x00\x01\x00\x00\x33\x1a\ +\x00\x00\x01\x88\xad\x7f\xe2\x75\ +\x00\x00\x03\xe8\x00\x00\x00\x00\x00\x01\x00\x00\x54\xb9\ +\x00\x00\x01\x88\xad\x7f\xe2\x79\ +\x00\x00\x05\x02\x00\x00\x00\x00\x00\x01\x00\x00\x6d\x4f\ +\x00\x00\x01\x88\xad\x7f\xe2\x75\ +\x00\x00\x00\xd4\x00\x00\x00\x00\x00\x01\x00\x00\x15\xe1\ +\x00\x00\x01\x88\xad\x7f\xe2\x75\ +\x00\x00\x04\x6e\x00\x00\x00\x00\x00\x01\x00\x00\x62\xec\ +\x00\x00\x01\x88\xad\x7f\xe2\x79\ +\x00\x00\x01\xe4\x00\x00\x00\x00\x00\x01\x00\x00\x2b\x88\ +\x00\x00\x01\x88\xad\x7f\xe2\x79\ 
+\x00\x00\x03\x68\x00\x00\x00\x00\x00\x01\x00\x00\x4c\x80\ +\x00\x00\x01\x88\xad\x7f\xe2\x75\ +\x00\x00\x03\x0a\x00\x00\x00\x00\x00\x01\x00\x00\x44\xfd\ +\x00\x00\x01\x88\xad\x7f\xe2\x79\ +\x00\x00\x03\xa6\x00\x00\x00\x00\x00\x01\x00\x00\x50\xef\ +\x00\x00\x01\x88\xad\x7f\xe2\x79\ +\x00\x00\x04\xb0\x00\x00\x00\x00\x00\x01\x00\x00\x68\x32\ +\x00\x00\x01\x88\xad\x7f\xe2\x79\ +\x00\x00\x00\x44\x00\x00\x00\x00\x00\x01\x00\x00\x02\x82\ +\x00\x00\x01\x88\xad\x7f\xe2\x79\ +\x00\x00\x04\x00\x00\x00\x00\x00\x00\x01\x00\x00\x57\xc3\ +\x00\x00\x01\x88\xad\x7f\xe2\x79\ +\x00\x00\x00\x74\x00\x00\x00\x00\x00\x01\x00\x00\x04\xd4\ +\x00\x00\x01\x88\xb2\x73\x0b\xbf\ +\x00\x00\x01\xc4\x00\x00\x00\x00\x00\x01\x00\x00\x24\x50\ +\x00\x00\x01\x88\xb2\x73\x0b\xbf\ +\x00\x00\x03\x4a\x00\x00\x00\x00\x00\x01\x00\x00\x4a\x98\ +\x00\x00\x01\x88\xad\x7f\xe2\x75\ +\x00\x00\x01\x58\x00\x00\x00\x00\x00\x01\x00\x00\x1d\xea\ +\x00\x00\x01\x88\xad\x7f\xe2\x79\ +\x00\x00\x01\x9e\x00\x00\x00\x00\x00\x01\x00\x00\x20\xe3\ +\x00\x00\x01\x88\xad\x7f\xe2\x79\ \x00\x00\x00\x1e\x00\x00\x00\x00\x00\x01\x00\x00\x00\x00\ -\x00\x00\x01\x88\xad\x7f\xe2\x75\ -\x00\x00\x01\xd2\x00\x00\x00\x00\x00\x01\x00\x00\x29\xf8\ \x00\x00\x01\x88\xad\x7f\xe2\x79\ -\x00\x00\x01\x4e\x00\x01\x00\x00\x00\x01\x00\x00\x18\xbe\ -\x00\x00\x01\x88\xad\x7f\xe2\x79\ -\x00\x00\x03\x9c\x00\x00\x00\x00\x00\x01\x00\x00\x4d\x31\ -\x00\x00\x01\x88\xad\x7f\xe2\x75\ -\x00\x00\x03\xec\x00\x00\x00\x00\x00\x01\x00\x00\x52\xbb\ -\x00\x00\x01\x88\xad\x7f\xe2\x75\ -\x00\x00\x04\x2e\x00\x00\x00\x00\x00\x01\x00\x00\x55\x70\ -\x00\x00\x01\x88\xad\x7f\xe2\x75\ -\x00\x00\x04\x8a\x00\x00\x00\x00\x00\x01\x00\x00\x5c\xe9\ -\x00\x00\x01\x88\xad\x7f\xe2\x75\ -\x00\x00\x02\x68\x00\x00\x00\x00\x00\x01\x00\x00\x3a\x5c\ -\x00\x00\x01\x88\xad\x7f\xe2\x75\ -\x00\x00\x03\x06\x00\x00\x00\x00\x00\x01\x00\x00\x45\x86\ -\x00\x00\x01\x88\xad\x7f\xe2\x79\ -\x00\x00\x04\xa0\x00\x00\x00\x00\x00\x01\x00\x00\x61\x6b\ -\x00\x00\x01\x88\xad\x7f\xe2\x75\ -\x00\x00\x02\x4a\x00\x00\x00\x00\x00\x01\x00\x00\x38\x75\ -\x00\x00\x01\x88\xad\x7f\xe2\x75\ -\x00\x00\x05\x4c\x00\x00\x00\x00\x00\x01\x00\x00\x6d\xb1\ -\x00\x00\x01\x88\xad\x7f\xe2\x79\ -\x00\x00\x03\x6a\x00\x00\x00\x00\x00\x01\x00\x00\x4b\x01\ -\x00\x00\x01\x88\xad\x7f\xe2\x75\ -\x00\x00\x00\x60\x00\x00\x00\x00\x00\x01\x00\x00\x06\x1a\ -\x00\x00\x01\x88\xad\x7f\xe2\x75\ -\x00\x00\x04\xbe\x00\x00\x00\x00\x00\x01\x00\x00\x64\x9e\ -\x00\x00\x01\x88\xad\x7f\xe2\x79\ -\x00\x00\x01\x6e\x00\x00\x00\x00\x00\x01\x00\x00\x19\xd2\ -\x00\x00\x01\x88\xad\x7f\xe2\x79\ -\x00\x00\x03\xce\x00\x00\x00\x00\x00\x01\x00\x00\x4f\x60\ -\x00\x00\x01\x88\xad\x7f\xe2\x75\ -\x00\x00\x03\x46\x00\x00\x00\x00\x00\x01\x00\x00\x48\xc8\ -\x00\x00\x01\x88\xad\x7f\xe2\x79\ -\x00\x00\x04\x12\x00\x00\x00\x00\x00\x01\x00\x00\x53\xf2\ -\x00\x00\x01\x88\xad\x7f\xe2\x79\ -\x00\x00\x02\xb0\x00\x00\x00\x00\x00\x01\x00\x00\x3e\xdd\ -\x00\x00\x01\x88\xad\x7f\xe2\x79\ -\x00\x00\x00\xd8\x00\x00\x00\x00\x00\x01\x00\x00\x0d\xbe\ -\x00\x00\x01\x88\xad\x7f\xe2\x79\ -\x00\x00\x00\x8a\x00\x00\x00\x00\x00\x01\x00\x00\x0a\x0c\ -\x00\x00\x01\x88\xad\x7f\xe2\x79\ -\x00\x00\x02\x22\x00\x00\x00\x00\x00\x01\x00\x00\x2e\x43\ -\x00\x00\x01\x88\xad\xb5\x8f\x9d\ -\x00\x00\x01\x88\x00\x00\x00\x00\x00\x01\x00\x00\x1d\x19\ -\x00\x00\x01\x88\xad\xb6\xba\xe9\ -\x00\x00\x00\xba\x00\x00\x00\x00\x00\x01\x00\x00\x0b\xd6\ -\x00\x00\x01\x88\xad\x7f\xe2\x75\ -\x00\x00\x04\xe0\x00\x00\x00\x00\x00\x01\x00\x00\x67\x4b\ -\x00\x00\x01\x88\xad\x7f\xe2\x79\ -\x00\x00\x05\x26\x00\x00\x00\x00\x00\x01\x00\x00\x6a\x44\ 
-\x00\x00\x01\x88\xad\x7f\xe2\x79\ -\x00\x00\x01\x28\x00\x00\x00\x00\x00\x01\x00\x00\x16\x3c\ -\x00\x00\x01\x88\xad\x7f\xe2\x79\ -\x00\x00\x04\x4a\x00\x00\x00\x00\x00\x01\x00\x00\x58\x7d\ +\x00\x00\x02\xec\x00\x00\x00\x00\x00\x01\x00\x00\x42\x6c\ \x00\x00\x01\x88\xad\x7f\xe2\x79\ " diff --git a/isat.yaml b/isat.yaml index 9376fac..bec54bb 100644 --- a/isat.yaml +++ b/isat.yaml @@ -1,3 +1,4 @@ +contour_mode: all label: - color: '#000000' name: __background__ @@ -19,4 +20,5 @@ label: name: cake - color: '#5c3566' name: fence -language: zh +language: en +mask_alpha: 0.5 diff --git a/main.py b/main.py index 6791910..cea8269 100644 --- a/main.py +++ b/main.py @@ -12,5 +12,5 @@ if __name__ == '__main__': app = QtWidgets.QApplication(['']) mainwindow = MainWindow() mainwindow.show() - sys.exit(app.exec_()) + sys.exit(app.exec()) diff --git a/segment_any/segment_any.py b/segment_any/segment_any.py index e4f4ef7..9f8a2f5 100644 --- a/segment_any/segment_any.py +++ b/segment_any/segment_any.py @@ -1,7 +1,7 @@ # -*- coding: utf-8 -*- # @Author : LG -from segment_anything import sam_model_registry, SamPredictor +from segment_anything import sam_model_registry, SamPredictor, SamAutomaticMaskGenerator import torch import numpy as np @@ -16,33 +16,35 @@ class SegAny: self.model_type = "vit_h" else: raise ValueError('The checkpoint named {} is not supported.'.format(checkpoint)) + torch.cuda.empty_cache() self.device = 'cuda' if torch.cuda.is_available() else 'cpu' sam = sam_model_registry[self.model_type](checkpoint=checkpoint) sam.to(device=self.device) - self.predictor = SamPredictor(sam) + self.predictor = SamAutomaticMaskGenerator(sam) + self.predictor_with_point_prompt = SamPredictor(sam) self.image = None def set_image(self, image): self.image = image - self.predictor.set_image(image) + self.predictor_with_point_prompt.set_image(image) def reset_image(self): - self.predictor.reset_image() + self.predictor_with_point_prompt.reset_image() self.image = None torch.cuda.empty_cache() - def predict(self, input_point, input_label): + def predict_with_point_prompt(self, input_point, input_label): input_point = np.array(input_point) input_label = np.array(input_label) - masks, scores, logits = self.predictor.predict( + masks, scores, logits = self.predictor_with_point_prompt.predict( point_coords=input_point, point_labels=input_label, multimask_output=True, ) mask_input = logits[np.argmax(scores), :, :] # Choose the model's best mask - masks, _, _ = self.predictor.predict( + masks, _, _ = self.predictor_with_point_prompt.predict( point_coords=input_point, point_labels=input_label, mask_input=mask_input[None, :, :], @@ -50,3 +52,28 @@ class SegAny: ) torch.cuda.empty_cache() return masks + + def predict(self, image): + self.image = image + masks = self.predictor.generate(image) + torch.cuda.empty_cache() + return masks + + +if __name__ == '__main__': + from PIL import Image + import time + import matplotlib.pyplot as plt + time1 = time.time() + seg = SegAny('sam_vit_h_4b8939.pth') + image = np.array(Image.open('../example/images/000000000113.jpg')) + time2 = time.time() + print(time2-time1) + # seg.set_image() + masks = seg.predict(image) + print(time.time() - time2) + print(masks) + for mask in masks: + mask = mask['segmentation'] + plt.imshow(mask) + plt.show() diff --git a/segment_anything/__init__.py b/segment_anything/__init__.py new file mode 100644 index 0000000..d576507 --- /dev/null +++ b/segment_anything/__init__.py @@ -0,0 +1,16 @@ +# Copyright (c) Meta Platforms, Inc. and affiliates. 
+# All rights reserved. + +# This source code is licensed under the license found in the +# LICENSE file in the root directory of this source tree. + +from .build_sam import ( + build_sam, + build_sam_vit_h, + build_sam_vit_l, + build_sam_vit_b, + sam_model_registry, +) +from .build_sam_baseline import sam_model_registry_baseline +from .predictor import SamPredictor +from .automatic_mask_generator import SamAutomaticMaskGenerator diff --git a/segment_anything/automatic_mask_generator.py b/segment_anything/automatic_mask_generator.py new file mode 100644 index 0000000..427ebeb --- /dev/null +++ b/segment_anything/automatic_mask_generator.py @@ -0,0 +1,374 @@ +# Copyright (c) Meta Platforms, Inc. and affiliates. +# All rights reserved. + +# This source code is licensed under the license found in the +# LICENSE file in the root directory of this source tree. + +import numpy as np +import torch +from torchvision.ops.boxes import batched_nms, box_area # type: ignore + +from typing import Any, Dict, List, Optional, Tuple + +from .modeling import Sam +from .predictor import SamPredictor +from .utils.amg import ( + MaskData, + area_from_rle, + batch_iterator, + batched_mask_to_box, + box_xyxy_to_xywh, + build_all_layer_point_grids, + calculate_stability_score, + coco_encode_rle, + generate_crop_boxes, + is_box_near_crop_edge, + mask_to_rle_pytorch, + remove_small_regions, + rle_to_mask, + uncrop_boxes_xyxy, + uncrop_masks, + uncrop_points, +) + + +class SamAutomaticMaskGenerator: + def __init__( + self, + model: Sam, + points_per_side: Optional[int] = 32, + points_per_batch: int = 64, + pred_iou_thresh: float = 0.88, + stability_score_thresh: float = 0.95, + stability_score_offset: float = 1.0, + box_nms_thresh: float = 0.7, + crop_n_layers: int = 0, + crop_nms_thresh: float = 0.7, + crop_overlap_ratio: float = 512 / 1500, + crop_n_points_downscale_factor: int = 1, + point_grids: Optional[List[np.ndarray]] = None, + min_mask_region_area: int = 0, + output_mode: str = "binary_mask", + ) -> None: + """ + Using a SAM model, generates masks for the entire image. + Generates a grid of point prompts over the image, then filters + low quality and duplicate masks. The default settings are chosen + for SAM with a ViT-H backbone. + + Arguments: + model (Sam): The SAM model to use for mask prediction. + points_per_side (int or None): The number of points to be sampled + along one side of the image. The total number of points is + points_per_side**2. If None, 'point_grids' must provide explicit + point sampling. + points_per_batch (int): Sets the number of points run simultaneously + by the model. Higher numbers may be faster but use more GPU memory. + pred_iou_thresh (float): A filtering threshold in [0,1], using the + model's predicted mask quality. + stability_score_thresh (float): A filtering threshold in [0,1], using + the stability of the mask under changes to the cutoff used to binarize + the model's mask predictions. + stability_score_offset (float): The amount to shift the cutoff when + calculated the stability score. + box_nms_thresh (float): The box IoU cutoff used by non-maximal + suppression to filter duplicate masks. + crop_n_layers (int): If >0, mask prediction will be run again on + crops of the image. Sets the number of layers to run, where each + layer has 2**i_layer number of image crops. + crop_nms_thresh (float): The box IoU cutoff used by non-maximal + suppression to filter duplicate masks between different crops. + crop_overlap_ratio (float): Sets the degree to which crops overlap. 
+ In the first crop layer, crops will overlap by this fraction of + the image length. Later layers with more crops scale down this overlap. + crop_n_points_downscale_factor (int): The number of points-per-side + sampled in layer n is scaled down by crop_n_points_downscale_factor**n. + point_grids (list(np.ndarray) or None): A list over explicit grids + of points used for sampling, normalized to [0,1]. The nth grid in the + list is used in the nth crop layer. Exclusive with points_per_side. + min_mask_region_area (int): If >0, postprocessing will be applied + to remove disconnected regions and holes in masks with area smaller + than min_mask_region_area. Requires opencv. + output_mode (str): The form masks are returned in. Can be 'binary_mask', + 'uncompressed_rle', or 'coco_rle'. 'coco_rle' requires pycocotools. + For large resolutions, 'binary_mask' may consume large amounts of + memory. + """ + + assert (points_per_side is None) != ( + point_grids is None + ), "Exactly one of points_per_side or point_grid must be provided." + if points_per_side is not None: + self.point_grids = build_all_layer_point_grids( + points_per_side, + crop_n_layers, + crop_n_points_downscale_factor, + ) + elif point_grids is not None: + self.point_grids = point_grids + else: + raise ValueError("Can't have both points_per_side and point_grid be None.") + + assert output_mode in [ + "binary_mask", + "uncompressed_rle", + "coco_rle", + ], f"Unknown output_mode {output_mode}." + if output_mode == "coco_rle": + from pycocotools import mask as mask_utils # type: ignore # noqa: F401 + + if min_mask_region_area > 0: + import cv2 # type: ignore # noqa: F401 + + self.predictor = SamPredictor(model) + self.points_per_batch = points_per_batch + self.pred_iou_thresh = pred_iou_thresh + self.stability_score_thresh = stability_score_thresh + self.stability_score_offset = stability_score_offset + self.box_nms_thresh = box_nms_thresh + self.crop_n_layers = crop_n_layers + self.crop_nms_thresh = crop_nms_thresh + self.crop_overlap_ratio = crop_overlap_ratio + self.crop_n_points_downscale_factor = crop_n_points_downscale_factor + self.min_mask_region_area = min_mask_region_area + self.output_mode = output_mode + + @torch.no_grad() + def generate(self, image: np.ndarray, multimask_output: bool = True) -> List[Dict[str, Any]]: + """ + Generates masks for the given image. + + Arguments: + image (np.ndarray): The image to generate masks for, in HWC uint8 format. + + Returns: + list(dict(str, any)): A list over records for masks. Each record is + a dict containing the following keys: + segmentation (dict(str, any) or np.ndarray): The mask. If + output_mode='binary_mask', is an array of shape HW. Otherwise, + is a dictionary containing the RLE. + bbox (list(float)): The box around the mask, in XYWH format. + area (int): The area in pixels of the mask. + predicted_iou (float): The model's own prediction of the mask's + quality. This is filtered by the pred_iou_thresh parameter. + point_coords (list(list(float))): The point coordinates input + to the model to generate this mask. + stability_score (float): A measure of the mask's quality. This + is filtered on using the stability_score_thresh parameter. + crop_box (list(float)): The crop of the image used to generate + the mask, given in XYWH format. 
+ """ + + # Generate masks + mask_data = self._generate_masks(image, multimask_output) + + # Filter small disconnected regions and holes in masks + if self.min_mask_region_area > 0: + mask_data = self.postprocess_small_regions( + mask_data, + self.min_mask_region_area, + max(self.box_nms_thresh, self.crop_nms_thresh), + ) + + # Encode masks + if self.output_mode == "coco_rle": + mask_data["segmentations"] = [coco_encode_rle(rle) for rle in mask_data["rles"]] + elif self.output_mode == "binary_mask": + mask_data["segmentations"] = [rle_to_mask(rle) for rle in mask_data["rles"]] + else: + mask_data["segmentations"] = mask_data["rles"] + + # Write mask records + curr_anns = [] + for idx in range(len(mask_data["segmentations"])): + ann = { + "segmentation": mask_data["segmentations"][idx], + "area": area_from_rle(mask_data["rles"][idx]), + "bbox": box_xyxy_to_xywh(mask_data["boxes"][idx]).tolist(), + "predicted_iou": mask_data["iou_preds"][idx].item(), + "point_coords": [mask_data["points"][idx].tolist()], + "stability_score": mask_data["stability_score"][idx].item(), + "crop_box": box_xyxy_to_xywh(mask_data["crop_boxes"][idx]).tolist(), + } + curr_anns.append(ann) + + return curr_anns + + def _generate_masks(self, image: np.ndarray, multimask_output: bool = True) -> MaskData: + orig_size = image.shape[:2] + crop_boxes, layer_idxs = generate_crop_boxes( + orig_size, self.crop_n_layers, self.crop_overlap_ratio + ) + + # Iterate over image crops + data = MaskData() + for crop_box, layer_idx in zip(crop_boxes, layer_idxs): + crop_data = self._process_crop(image, crop_box, layer_idx, orig_size, multimask_output) + data.cat(crop_data) + + # Remove duplicate masks between crops + if len(crop_boxes) > 1: + # Prefer masks from smaller crops + scores = 1 / box_area(data["crop_boxes"]) + scores = scores.to(data["boxes"].device) + keep_by_nms = batched_nms( + data["boxes"].float(), + scores, + torch.zeros_like(data["boxes"][:, 0]), # categories + iou_threshold=self.crop_nms_thresh, + ) + data.filter(keep_by_nms) + + data.to_numpy() + return data + + def _process_crop( + self, + image: np.ndarray, + crop_box: List[int], + crop_layer_idx: int, + orig_size: Tuple[int, ...], + multimask_output: bool = True, + ) -> MaskData: + # Crop the image and calculate embeddings + x0, y0, x1, y1 = crop_box + cropped_im = image[y0:y1, x0:x1, :] + cropped_im_size = cropped_im.shape[:2] + self.predictor.set_image(cropped_im) + + # Get points for this crop + points_scale = np.array(cropped_im_size)[None, ::-1] + points_for_image = self.point_grids[crop_layer_idx] * points_scale + + # Generate masks for this crop in batches + data = MaskData() + for (points,) in batch_iterator(self.points_per_batch, points_for_image): + batch_data = self._process_batch(points, cropped_im_size, crop_box, orig_size, multimask_output) + data.cat(batch_data) + del batch_data + self.predictor.reset_image() + + # Remove duplicates within this crop. 
+ keep_by_nms = batched_nms( + data["boxes"].float(), + data["iou_preds"], + torch.zeros_like(data["boxes"][:, 0]), # categories + iou_threshold=self.box_nms_thresh, + ) + data.filter(keep_by_nms) + + # Return to the original image frame + data["boxes"] = uncrop_boxes_xyxy(data["boxes"], crop_box) + data["points"] = uncrop_points(data["points"], crop_box) + data["crop_boxes"] = torch.tensor([crop_box for _ in range(len(data["rles"]))]) + + return data + + def _process_batch( + self, + points: np.ndarray, + im_size: Tuple[int, ...], + crop_box: List[int], + orig_size: Tuple[int, ...], + multimask_output: bool = True, + ) -> MaskData: + orig_h, orig_w = orig_size + + # Run model on this batch + transformed_points = self.predictor.transform.apply_coords(points, im_size) + in_points = torch.as_tensor(transformed_points, device=self.predictor.device) + in_labels = torch.ones(in_points.shape[0], dtype=torch.int, device=in_points.device) + masks, iou_preds, _ = self.predictor.predict_torch( + in_points[:, None, :], + in_labels[:, None], + multimask_output=multimask_output, + return_logits=True, + ) + + # Serialize predictions and store in MaskData + data = MaskData( + masks=masks.flatten(0, 1), + iou_preds=iou_preds.flatten(0, 1), + points=torch.as_tensor(points.repeat(masks.shape[1], axis=0)), + ) + del masks + + # Filter by predicted IoU + if self.pred_iou_thresh > 0.0: + keep_mask = data["iou_preds"] > self.pred_iou_thresh + data.filter(keep_mask) + + # Calculate stability score + data["stability_score"] = calculate_stability_score( + data["masks"], self.predictor.model.mask_threshold, self.stability_score_offset + ) + if self.stability_score_thresh > 0.0: + keep_mask = data["stability_score"] >= self.stability_score_thresh + data.filter(keep_mask) + + # Threshold masks and calculate boxes + data["masks"] = data["masks"] > self.predictor.model.mask_threshold + data["boxes"] = batched_mask_to_box(data["masks"]) + + # Filter boxes that touch crop boundaries + keep_mask = ~is_box_near_crop_edge(data["boxes"], crop_box, [0, 0, orig_w, orig_h]) + if not torch.all(keep_mask): + data.filter(keep_mask) + + # Compress to RLE + data["masks"] = uncrop_masks(data["masks"], crop_box, orig_h, orig_w) + data["rles"] = mask_to_rle_pytorch(data["masks"]) + del data["masks"] + + return data + + @staticmethod + def postprocess_small_regions( + mask_data: MaskData, min_area: int, nms_thresh: float + ) -> MaskData: + """ + Removes small disconnected regions and holes in masks, then reruns + box NMS to remove any new duplicates. + + Edits mask_data in place. + + Requires open-cv as a dependency. 
+ """ + if len(mask_data["rles"]) == 0: + return mask_data + + # Filter small disconnected regions and holes + new_masks = [] + scores = [] + for rle in mask_data["rles"]: + mask = rle_to_mask(rle) + + mask, changed = remove_small_regions(mask, min_area, mode="holes") + unchanged = not changed + mask, changed = remove_small_regions(mask, min_area, mode="islands") + unchanged = unchanged and not changed + + new_masks.append(torch.as_tensor(mask).unsqueeze(0)) + # Give score=0 to changed masks and score=1 to unchanged masks + # so NMS will prefer ones that didn't need postprocessing + scores.append(float(unchanged)) + + # Recalculate boxes and remove any new duplicates + masks = torch.cat(new_masks, dim=0) + boxes = batched_mask_to_box(masks) + keep_by_nms = batched_nms( + boxes.float(), + torch.as_tensor(scores), + torch.zeros_like(boxes[:, 0]), # categories + iou_threshold=nms_thresh, + ) + + # Only recalculate RLEs for masks that have changed + for i_mask in keep_by_nms: + if scores[i_mask] == 0.0: + mask_torch = masks[i_mask].unsqueeze(0) + mask_data["rles"][i_mask] = mask_to_rle_pytorch(mask_torch)[0] + mask_data["boxes"][i_mask] = boxes[i_mask] # update res directly + mask_data.filter(keep_by_nms) + + return mask_data diff --git a/segment_anything/build_sam.py b/segment_anything/build_sam.py new file mode 100644 index 0000000..b280cf4 --- /dev/null +++ b/segment_anything/build_sam.py @@ -0,0 +1,113 @@ +# Copyright (c) Meta Platforms, Inc. and affiliates. +# All rights reserved. + +# This source code is licensed under the license found in the +# LICENSE file in the root directory of this source tree. + +import torch + +from functools import partial + +from .modeling import ImageEncoderViT, MaskDecoderHQ, PromptEncoder, Sam, TwoWayTransformer + + +def build_sam_vit_h(checkpoint=None): + return _build_sam( + encoder_embed_dim=1280, + encoder_depth=32, + encoder_num_heads=16, + encoder_global_attn_indexes=[7, 15, 23, 31], + checkpoint=checkpoint, + ) + + +build_sam = build_sam_vit_h + + +def build_sam_vit_l(checkpoint=None): + return _build_sam( + encoder_embed_dim=1024, + encoder_depth=24, + encoder_num_heads=16, + encoder_global_attn_indexes=[5, 11, 17, 23], + checkpoint=checkpoint, + ) + + +def build_sam_vit_b(checkpoint=None): + return _build_sam( + encoder_embed_dim=768, + encoder_depth=12, + encoder_num_heads=12, + encoder_global_attn_indexes=[2, 5, 8, 11], + checkpoint=checkpoint, + ) + + +sam_model_registry = { + "default": build_sam_vit_h, + "vit_h": build_sam_vit_h, + "vit_l": build_sam_vit_l, + "vit_b": build_sam_vit_b, +} + + +def _build_sam( + encoder_embed_dim, + encoder_depth, + encoder_num_heads, + encoder_global_attn_indexes, + checkpoint=None, +): + prompt_embed_dim = 256 + image_size = 1024 + vit_patch_size = 16 + image_embedding_size = image_size // vit_patch_size + sam = Sam( + image_encoder=ImageEncoderViT( + depth=encoder_depth, + embed_dim=encoder_embed_dim, + img_size=image_size, + mlp_ratio=4, + norm_layer=partial(torch.nn.LayerNorm, eps=1e-6), + num_heads=encoder_num_heads, + patch_size=vit_patch_size, + qkv_bias=True, + use_rel_pos=True, + global_attn_indexes=encoder_global_attn_indexes, + window_size=14, + out_chans=prompt_embed_dim, + ), + prompt_encoder=PromptEncoder( + embed_dim=prompt_embed_dim, + image_embedding_size=(image_embedding_size, image_embedding_size), + input_image_size=(image_size, image_size), + mask_in_chans=16, + ), + mask_decoder=MaskDecoderHQ( + num_multimask_outputs=3, + transformer=TwoWayTransformer( + depth=2, + 
embedding_dim=prompt_embed_dim, + mlp_dim=2048, + num_heads=8, + ), + transformer_dim=prompt_embed_dim, + iou_head_depth=3, + iou_head_hidden_dim=256, + vit_dim=encoder_embed_dim, + ), + pixel_mean=[123.675, 116.28, 103.53], + pixel_std=[58.395, 57.12, 57.375], + ) + # sam.eval() + if checkpoint is not None: + with open(checkpoint, "rb") as f: + state_dict = torch.load(f) + info = sam.load_state_dict(state_dict, strict=False) + print(info) + for n, p in sam.named_parameters(): + if 'hf_token' not in n and 'hf_mlp' not in n and 'compress_vit_feat' not in n and 'embedding_encoder' not in n and 'embedding_maskfeature' not in n: + p.requires_grad = False + + return sam diff --git a/segment_anything/build_sam_baseline.py b/segment_anything/build_sam_baseline.py new file mode 100644 index 0000000..8f14970 --- /dev/null +++ b/segment_anything/build_sam_baseline.py @@ -0,0 +1,107 @@ +# Copyright (c) Meta Platforms, Inc. and affiliates. +# All rights reserved. + +# This source code is licensed under the license found in the +# LICENSE file in the root directory of this source tree. + +import torch + +from functools import partial + +from .modeling import ImageEncoderViT, MaskDecoder, PromptEncoder, Sam, TwoWayTransformer + + +def build_sam_vit_h(checkpoint=None): + return _build_sam( + encoder_embed_dim=1280, + encoder_depth=32, + encoder_num_heads=16, + encoder_global_attn_indexes=[7, 15, 23, 31], + checkpoint=checkpoint, + ) + + +build_sam = build_sam_vit_h + + +def build_sam_vit_l(checkpoint=None): + return _build_sam( + encoder_embed_dim=1024, + encoder_depth=24, + encoder_num_heads=16, + encoder_global_attn_indexes=[5, 11, 17, 23], + checkpoint=checkpoint, + ) + + +def build_sam_vit_b(checkpoint=None): + return _build_sam( + encoder_embed_dim=768, + encoder_depth=12, + encoder_num_heads=12, + encoder_global_attn_indexes=[2, 5, 8, 11], + checkpoint=checkpoint, + ) + + +sam_model_registry_baseline = { + "default": build_sam_vit_h, + "vit_h": build_sam_vit_h, + "vit_l": build_sam_vit_l, + "vit_b": build_sam_vit_b, +} + + +def _build_sam( + encoder_embed_dim, + encoder_depth, + encoder_num_heads, + encoder_global_attn_indexes, + checkpoint=None, +): + prompt_embed_dim = 256 + image_size = 1024 + vit_patch_size = 16 + image_embedding_size = image_size // vit_patch_size + sam = Sam( + image_encoder=ImageEncoderViT( + depth=encoder_depth, + embed_dim=encoder_embed_dim, + img_size=image_size, + mlp_ratio=4, + norm_layer=partial(torch.nn.LayerNorm, eps=1e-6), + num_heads=encoder_num_heads, + patch_size=vit_patch_size, + qkv_bias=True, + use_rel_pos=True, + global_attn_indexes=encoder_global_attn_indexes, + window_size=14, + out_chans=prompt_embed_dim, + ), + prompt_encoder=PromptEncoder( + embed_dim=prompt_embed_dim, + image_embedding_size=(image_embedding_size, image_embedding_size), + input_image_size=(image_size, image_size), + mask_in_chans=16, + ), + mask_decoder=MaskDecoder( + num_multimask_outputs=3, + transformer=TwoWayTransformer( + depth=2, + embedding_dim=prompt_embed_dim, + mlp_dim=2048, + num_heads=8, + ), + transformer_dim=prompt_embed_dim, + iou_head_depth=3, + iou_head_hidden_dim=256, + ), + pixel_mean=[123.675, 116.28, 103.53], + pixel_std=[58.395, 57.12, 57.375], + ) + sam.eval() + if checkpoint is not None: + with open(checkpoint, "rb") as f: + state_dict = torch.load(f) + sam.load_state_dict(state_dict) + return sam \ No newline at end of file diff --git a/segment_anything/modeling/__init__.py b/segment_anything/modeling/__init__.py new file mode 100644 index 0000000..71172d2 --- 
/dev/null +++ b/segment_anything/modeling/__init__.py @@ -0,0 +1,12 @@ +# Copyright (c) Meta Platforms, Inc. and affiliates. +# All rights reserved. + +# This source code is licensed under the license found in the +# LICENSE file in the root directory of this source tree. + +from .sam import Sam +from .image_encoder import ImageEncoderViT +from .mask_decoder_hq import MaskDecoderHQ +from .mask_decoder import MaskDecoder +from .prompt_encoder import PromptEncoder +from .transformer import TwoWayTransformer diff --git a/segment_anything/modeling/common.py b/segment_anything/modeling/common.py new file mode 100644 index 0000000..2bf1523 --- /dev/null +++ b/segment_anything/modeling/common.py @@ -0,0 +1,43 @@ +# Copyright (c) Meta Platforms, Inc. and affiliates. +# All rights reserved. + +# This source code is licensed under the license found in the +# LICENSE file in the root directory of this source tree. + +import torch +import torch.nn as nn + +from typing import Type + + +class MLPBlock(nn.Module): + def __init__( + self, + embedding_dim: int, + mlp_dim: int, + act: Type[nn.Module] = nn.GELU, + ) -> None: + super().__init__() + self.lin1 = nn.Linear(embedding_dim, mlp_dim) + self.lin2 = nn.Linear(mlp_dim, embedding_dim) + self.act = act() + + def forward(self, x: torch.Tensor) -> torch.Tensor: + return self.lin2(self.act(self.lin1(x))) + + +# From https://github.com/facebookresearch/detectron2/blob/main/detectron2/layers/batch_norm.py # noqa +# Itself from https://github.com/facebookresearch/ConvNeXt/blob/d1fa8f6fef0a165b27399986cc2bdacc92777e40/models/convnext.py#L119 # noqa +class LayerNorm2d(nn.Module): + def __init__(self, num_channels: int, eps: float = 1e-6) -> None: + super().__init__() + self.weight = nn.Parameter(torch.ones(num_channels)) + self.bias = nn.Parameter(torch.zeros(num_channels)) + self.eps = eps + + def forward(self, x: torch.Tensor) -> torch.Tensor: + u = x.mean(1, keepdim=True) + s = (x - u).pow(2).mean(1, keepdim=True) + x = (x - u) / torch.sqrt(s + self.eps) + x = self.weight[:, None, None] * x + self.bias[:, None, None] + return x diff --git a/segment_anything/modeling/image_encoder.py b/segment_anything/modeling/image_encoder.py new file mode 100644 index 0000000..7048651 --- /dev/null +++ b/segment_anything/modeling/image_encoder.py @@ -0,0 +1,398 @@ +# Copyright (c) Meta Platforms, Inc. and affiliates. +# All rights reserved. + +# This source code is licensed under the license found in the +# LICENSE file in the root directory of this source tree. + +import torch +import torch.nn as nn +import torch.nn.functional as F + +from typing import Optional, Tuple, Type + +from .common import LayerNorm2d, MLPBlock + + +# This class and its supporting functions below lightly adapted from the ViTDet backbone available at: https://github.com/facebookresearch/detectron2/blob/main/detectron2/modeling/backbone/vit.py # noqa +class ImageEncoderViT(nn.Module): + def __init__( + self, + img_size: int = 1024, + patch_size: int = 16, + in_chans: int = 3, + embed_dim: int = 768, + depth: int = 12, + num_heads: int = 12, + mlp_ratio: float = 4.0, + out_chans: int = 256, + qkv_bias: bool = True, + norm_layer: Type[nn.Module] = nn.LayerNorm, + act_layer: Type[nn.Module] = nn.GELU, + use_abs_pos: bool = True, + use_rel_pos: bool = False, + rel_pos_zero_init: bool = True, + window_size: int = 0, + global_attn_indexes: Tuple[int, ...] = (), + ) -> None: + """ + Args: + img_size (int): Input image size. + patch_size (int): Patch size. + in_chans (int): Number of input image channels. 
+ embed_dim (int): Patch embedding dimension. + depth (int): Depth of ViT. + num_heads (int): Number of attention heads in each ViT block. + mlp_ratio (float): Ratio of mlp hidden dim to embedding dim. + qkv_bias (bool): If True, add a learnable bias to query, key, value. + norm_layer (nn.Module): Normalization layer. + act_layer (nn.Module): Activation layer. + use_abs_pos (bool): If True, use absolute positional embeddings. + use_rel_pos (bool): If True, add relative positional embeddings to the attention map. + rel_pos_zero_init (bool): If True, zero initialize relative positional parameters. + window_size (int): Window size for window attention blocks. + global_attn_indexes (list): Indexes for blocks using global attention. + """ + super().__init__() + self.img_size = img_size + + self.patch_embed = PatchEmbed( + kernel_size=(patch_size, patch_size), + stride=(patch_size, patch_size), + in_chans=in_chans, + embed_dim=embed_dim, + ) + + self.pos_embed: Optional[nn.Parameter] = None + if use_abs_pos: + # Initialize absolute positional embedding with pretrain image size. + self.pos_embed = nn.Parameter( + torch.zeros(1, img_size // patch_size, img_size // patch_size, embed_dim) + ) + + self.blocks = nn.ModuleList() + for i in range(depth): + block = Block( + dim=embed_dim, + num_heads=num_heads, + mlp_ratio=mlp_ratio, + qkv_bias=qkv_bias, + norm_layer=norm_layer, + act_layer=act_layer, + use_rel_pos=use_rel_pos, + rel_pos_zero_init=rel_pos_zero_init, + window_size=window_size if i not in global_attn_indexes else 0, + input_size=(img_size // patch_size, img_size // patch_size), + ) + self.blocks.append(block) + + self.neck = nn.Sequential( + nn.Conv2d( + embed_dim, + out_chans, + kernel_size=1, + bias=False, + ), + LayerNorm2d(out_chans), + nn.Conv2d( + out_chans, + out_chans, + kernel_size=3, + padding=1, + bias=False, + ), + LayerNorm2d(out_chans), + ) + + def forward(self, x: torch.Tensor) -> torch.Tensor: + x = self.patch_embed(x) + if self.pos_embed is not None: + x = x + self.pos_embed + + interm_embeddings=[] + for blk in self.blocks: + x = blk(x) + if blk.window_size == 0: + interm_embeddings.append(x) + + x = self.neck(x.permute(0, 3, 1, 2)) + + return x, interm_embeddings + + +class Block(nn.Module): + """Transformer blocks with support of window attention and residual propagation blocks""" + + def __init__( + self, + dim: int, + num_heads: int, + mlp_ratio: float = 4.0, + qkv_bias: bool = True, + norm_layer: Type[nn.Module] = nn.LayerNorm, + act_layer: Type[nn.Module] = nn.GELU, + use_rel_pos: bool = False, + rel_pos_zero_init: bool = True, + window_size: int = 0, + input_size: Optional[Tuple[int, int]] = None, + ) -> None: + """ + Args: + dim (int): Number of input channels. + num_heads (int): Number of attention heads in each ViT block. + mlp_ratio (float): Ratio of mlp hidden dim to embedding dim. + qkv_bias (bool): If True, add a learnable bias to query, key, value. + norm_layer (nn.Module): Normalization layer. + act_layer (nn.Module): Activation layer. + use_rel_pos (bool): If True, add relative positional embeddings to the attention map. + rel_pos_zero_init (bool): If True, zero initialize relative positional parameters. + window_size (int): Window size for window attention blocks. If it equals 0, then + use global attention. + input_size (tuple(int, int) or None): Input resolution for calculating the relative + positional parameter size. 
+ """ + super().__init__() + self.norm1 = norm_layer(dim) + self.attn = Attention( + dim, + num_heads=num_heads, + qkv_bias=qkv_bias, + use_rel_pos=use_rel_pos, + rel_pos_zero_init=rel_pos_zero_init, + input_size=input_size if window_size == 0 else (window_size, window_size), + ) + + self.norm2 = norm_layer(dim) + self.mlp = MLPBlock(embedding_dim=dim, mlp_dim=int(dim * mlp_ratio), act=act_layer) + + self.window_size = window_size + + def forward(self, x: torch.Tensor) -> torch.Tensor: + shortcut = x + x = self.norm1(x) + # Window partition + if self.window_size > 0: + H, W = x.shape[1], x.shape[2] + x, pad_hw = window_partition(x, self.window_size) + + x = self.attn(x) + # Reverse window partition + if self.window_size > 0: + x = window_unpartition(x, self.window_size, pad_hw, (H, W)) + + x = shortcut + x + x = x + self.mlp(self.norm2(x)) + + return x + + +class Attention(nn.Module): + """Multi-head Attention block with relative position embeddings.""" + + def __init__( + self, + dim: int, + num_heads: int = 8, + qkv_bias: bool = True, + use_rel_pos: bool = False, + rel_pos_zero_init: bool = True, + input_size: Optional[Tuple[int, int]] = None, + ) -> None: + """ + Args: + dim (int): Number of input channels. + num_heads (int): Number of attention heads. + qkv_bias (bool): If True, add a learnable bias to query, key, value. + rel_pos (bool): If True, add relative positional embeddings to the attention map. + rel_pos_zero_init (bool): If True, zero initialize relative positional parameters. + input_size (tuple(int, int) or None): Input resolution for calculating the relative + positional parameter size. + """ + super().__init__() + self.num_heads = num_heads + head_dim = dim // num_heads + self.scale = head_dim**-0.5 + + self.qkv = nn.Linear(dim, dim * 3, bias=qkv_bias) + self.proj = nn.Linear(dim, dim) + + self.use_rel_pos = use_rel_pos + if self.use_rel_pos: + assert ( + input_size is not None + ), "Input size must be provided if using relative positional encoding." + # initialize relative positional embeddings + self.rel_pos_h = nn.Parameter(torch.zeros(2 * input_size[0] - 1, head_dim)) + self.rel_pos_w = nn.Parameter(torch.zeros(2 * input_size[1] - 1, head_dim)) + + def forward(self, x: torch.Tensor) -> torch.Tensor: + B, H, W, _ = x.shape + # qkv with shape (3, B, nHead, H * W, C) + qkv = self.qkv(x).reshape(B, H * W, 3, self.num_heads, -1).permute(2, 0, 3, 1, 4) + # q, k, v with shape (B * nHead, H * W, C) + q, k, v = qkv.reshape(3, B * self.num_heads, H * W, -1).unbind(0) + + attn = (q * self.scale) @ k.transpose(-2, -1) + + if self.use_rel_pos: + attn = add_decomposed_rel_pos(attn, q, self.rel_pos_h, self.rel_pos_w, (H, W), (H, W)) + + attn = attn.softmax(dim=-1) + x = (attn @ v).view(B, self.num_heads, H, W, -1).permute(0, 2, 3, 1, 4).reshape(B, H, W, -1) + x = self.proj(x) + + return x + + +def window_partition(x: torch.Tensor, window_size: int) -> Tuple[torch.Tensor, Tuple[int, int]]: + """ + Partition into non-overlapping windows with padding if needed. + Args: + x (tensor): input tokens with [B, H, W, C]. + window_size (int): window size. + + Returns: + windows: windows after partition with [B * num_windows, window_size, window_size, C]. 
+ (Hp, Wp): padded height and width before partition + """ + B, H, W, C = x.shape + + pad_h = (window_size - H % window_size) % window_size + pad_w = (window_size - W % window_size) % window_size + if pad_h > 0 or pad_w > 0: + x = F.pad(x, (0, 0, 0, pad_w, 0, pad_h)) + Hp, Wp = H + pad_h, W + pad_w + + x = x.view(B, Hp // window_size, window_size, Wp // window_size, window_size, C) + windows = x.permute(0, 1, 3, 2, 4, 5).contiguous().view(-1, window_size, window_size, C) + return windows, (Hp, Wp) + + +def window_unpartition( + windows: torch.Tensor, window_size: int, pad_hw: Tuple[int, int], hw: Tuple[int, int] +) -> torch.Tensor: + """ + Window unpartition into original sequences and removing padding. + Args: + windows (tensor): input tokens with [B * num_windows, window_size, window_size, C]. + window_size (int): window size. + pad_hw (Tuple): padded height and width (Hp, Wp). + hw (Tuple): original height and width (H, W) before padding. + + Returns: + x: unpartitioned sequences with [B, H, W, C]. + """ + Hp, Wp = pad_hw + H, W = hw + B = windows.shape[0] // (Hp * Wp // window_size // window_size) + x = windows.view(B, Hp // window_size, Wp // window_size, window_size, window_size, -1) + x = x.permute(0, 1, 3, 2, 4, 5).contiguous().view(B, Hp, Wp, -1) + + if Hp > H or Wp > W: + x = x[:, :H, :W, :].contiguous() + return x + + +def get_rel_pos(q_size: int, k_size: int, rel_pos: torch.Tensor) -> torch.Tensor: + """ + Get relative positional embeddings according to the relative positions of + query and key sizes. + Args: + q_size (int): size of query q. + k_size (int): size of key k. + rel_pos (Tensor): relative position embeddings (L, C). + + Returns: + Extracted positional embeddings according to relative positions. + """ + max_rel_dist = int(2 * max(q_size, k_size) - 1) + # Interpolate rel pos if needed. + if rel_pos.shape[0] != max_rel_dist: + # Interpolate rel pos. + rel_pos_resized = F.interpolate( + rel_pos.reshape(1, rel_pos.shape[0], -1).permute(0, 2, 1), + size=max_rel_dist, + mode="linear", + ) + rel_pos_resized = rel_pos_resized.reshape(-1, max_rel_dist).permute(1, 0) + else: + rel_pos_resized = rel_pos + + # Scale the coords with short length if shapes for q and k are different. + q_coords = torch.arange(q_size)[:, None] * max(k_size / q_size, 1.0) + k_coords = torch.arange(k_size)[None, :] * max(q_size / k_size, 1.0) + relative_coords = (q_coords - k_coords) + (k_size - 1) * max(q_size / k_size, 1.0) + + return rel_pos_resized[relative_coords.long()] + + +def add_decomposed_rel_pos( + attn: torch.Tensor, + q: torch.Tensor, + rel_pos_h: torch.Tensor, + rel_pos_w: torch.Tensor, + q_size: Tuple[int, int], + k_size: Tuple[int, int], +) -> torch.Tensor: + """ + Calculate decomposed Relative Positional Embeddings from :paper:`mvitv2`. + https://github.com/facebookresearch/mvit/blob/19786631e330df9f3622e5402b4a419a263a2c80/mvit/models/attention.py # noqa B950 + Args: + attn (Tensor): attention map. + q (Tensor): query q in the attention layer with shape (B, q_h * q_w, C). + rel_pos_h (Tensor): relative position embeddings (Lh, C) for height axis. + rel_pos_w (Tensor): relative position embeddings (Lw, C) for width axis. + q_size (Tuple): spatial sequence size of query q with (q_h, q_w). + k_size (Tuple): spatial sequence size of key k with (k_h, k_w). + + Returns: + attn (Tensor): attention map with added relative positional embeddings. 
+ """ + q_h, q_w = q_size + k_h, k_w = k_size + Rh = get_rel_pos(q_h, k_h, rel_pos_h) + Rw = get_rel_pos(q_w, k_w, rel_pos_w) + + B, _, dim = q.shape + r_q = q.reshape(B, q_h, q_w, dim) + rel_h = torch.einsum("bhwc,hkc->bhwk", r_q, Rh) + rel_w = torch.einsum("bhwc,wkc->bhwk", r_q, Rw) + + attn = ( + attn.view(B, q_h, q_w, k_h, k_w) + rel_h[:, :, :, :, None] + rel_w[:, :, :, None, :] + ).view(B, q_h * q_w, k_h * k_w) + + return attn + + +class PatchEmbed(nn.Module): + """ + Image to Patch Embedding. + """ + + def __init__( + self, + kernel_size: Tuple[int, int] = (16, 16), + stride: Tuple[int, int] = (16, 16), + padding: Tuple[int, int] = (0, 0), + in_chans: int = 3, + embed_dim: int = 768, + ) -> None: + """ + Args: + kernel_size (Tuple): kernel size of the projection layer. + stride (Tuple): stride of the projection layer. + padding (Tuple): padding size of the projection layer. + in_chans (int): Number of input image channels. + embed_dim (int): Patch embedding dimension. + """ + super().__init__() + + self.proj = nn.Conv2d( + in_chans, embed_dim, kernel_size=kernel_size, stride=stride, padding=padding + ) + + def forward(self, x: torch.Tensor) -> torch.Tensor: + x = self.proj(x) + # B C H W -> B H W C + x = x.permute(0, 2, 3, 1) + return x diff --git a/segment_anything/modeling/mask_decoder.py b/segment_anything/modeling/mask_decoder.py new file mode 100644 index 0000000..242ecb7 --- /dev/null +++ b/segment_anything/modeling/mask_decoder.py @@ -0,0 +1,178 @@ +# Copyright (c) Meta Platforms, Inc. and affiliates. +# All rights reserved. + +# This source code is licensed under the license found in the +# LICENSE file in the root directory of this source tree. + +import torch +from torch import nn +from torch.nn import functional as F + +from typing import List, Tuple, Type + +from .common import LayerNorm2d + + +class MaskDecoder(nn.Module): + def __init__( + self, + *, + transformer_dim: int, + transformer: nn.Module, + num_multimask_outputs: int = 3, + activation: Type[nn.Module] = nn.GELU, + iou_head_depth: int = 3, + iou_head_hidden_dim: int = 256, + ) -> None: + """ + Predicts masks given an image and prompt embeddings, using a + transformer architecture. 
+ + Arguments: + transformer_dim (int): the channel dimension of the transformer + transformer (nn.Module): the transformer used to predict masks + num_multimask_outputs (int): the number of masks to predict + when disambiguating masks + activation (nn.Module): the type of activation to use when + upscaling masks + iou_head_depth (int): the depth of the MLP used to predict + mask quality + iou_head_hidden_dim (int): the hidden dimension of the MLP + used to predict mask quality + """ + super().__init__() + self.transformer_dim = transformer_dim + self.transformer = transformer + + self.num_multimask_outputs = num_multimask_outputs + + self.iou_token = nn.Embedding(1, transformer_dim) + self.num_mask_tokens = num_multimask_outputs + 1 + self.mask_tokens = nn.Embedding(self.num_mask_tokens, transformer_dim) + + self.output_upscaling = nn.Sequential( + nn.ConvTranspose2d(transformer_dim, transformer_dim // 4, kernel_size=2, stride=2), + LayerNorm2d(transformer_dim // 4), + activation(), + nn.ConvTranspose2d(transformer_dim // 4, transformer_dim // 8, kernel_size=2, stride=2), + activation(), + ) + self.output_hypernetworks_mlps = nn.ModuleList( + [ + MLP(transformer_dim, transformer_dim, transformer_dim // 8, 3) + for i in range(self.num_mask_tokens) + ] + ) + + self.iou_prediction_head = MLP( + transformer_dim, iou_head_hidden_dim, self.num_mask_tokens, iou_head_depth + ) + + def forward( + self, + image_embeddings: torch.Tensor, + image_pe: torch.Tensor, + sparse_prompt_embeddings: torch.Tensor, + dense_prompt_embeddings: torch.Tensor, + multimask_output: bool, + hq_token_only: bool, + interm_embeddings: torch.Tensor, + ) -> Tuple[torch.Tensor, torch.Tensor]: + """ + Predict masks given image and prompt embeddings. + + Arguments: + image_embeddings (torch.Tensor): the embeddings from the image encoder + image_pe (torch.Tensor): positional encoding with the shape of image_embeddings + sparse_prompt_embeddings (torch.Tensor): the embeddings of the points and boxes + dense_prompt_embeddings (torch.Tensor): the embeddings of the mask inputs + multimask_output (bool): Whether to return multiple masks or a single + mask. + + Returns: + torch.Tensor: batched predicted masks + torch.Tensor: batched predictions of mask quality + """ + masks, iou_pred = self.predict_masks( + image_embeddings=image_embeddings, + image_pe=image_pe, + sparse_prompt_embeddings=sparse_prompt_embeddings, + dense_prompt_embeddings=dense_prompt_embeddings, + ) + + # Select the correct mask or masks for output + if multimask_output: + mask_slice = slice(1, None) + else: + mask_slice = slice(0, 1) + masks = masks[:, mask_slice, :, :] + iou_pred = iou_pred[:, mask_slice] + + # Prepare output + return masks, iou_pred + + def predict_masks( + self, + image_embeddings: torch.Tensor, + image_pe: torch.Tensor, + sparse_prompt_embeddings: torch.Tensor, + dense_prompt_embeddings: torch.Tensor, + ) -> Tuple[torch.Tensor, torch.Tensor]: + """Predicts masks. 
See 'forward' for more details.""" + # Concatenate output tokens + output_tokens = torch.cat([self.iou_token.weight, self.mask_tokens.weight], dim=0) + output_tokens = output_tokens.unsqueeze(0).expand(sparse_prompt_embeddings.size(0), -1, -1) + tokens = torch.cat((output_tokens, sparse_prompt_embeddings), dim=1) + + # Expand per-image data in batch direction to be per-mask + src = torch.repeat_interleave(image_embeddings, tokens.shape[0], dim=0) + src = src + dense_prompt_embeddings + pos_src = torch.repeat_interleave(image_pe, tokens.shape[0], dim=0) + b, c, h, w = src.shape + + # Run the transformer + hs, src = self.transformer(src, pos_src, tokens) + iou_token_out = hs[:, 0, :] + mask_tokens_out = hs[:, 1 : (1 + self.num_mask_tokens), :] + + # Upscale mask embeddings and predict masks using the mask tokens + src = src.transpose(1, 2).view(b, c, h, w) + upscaled_embedding = self.output_upscaling(src) + hyper_in_list: List[torch.Tensor] = [] + for i in range(self.num_mask_tokens): + hyper_in_list.append(self.output_hypernetworks_mlps[i](mask_tokens_out[:, i, :])) + hyper_in = torch.stack(hyper_in_list, dim=1) + b, c, h, w = upscaled_embedding.shape + masks = (hyper_in @ upscaled_embedding.view(b, c, h * w)).view(b, -1, h, w) + + # Generate mask quality predictions + iou_pred = self.iou_prediction_head(iou_token_out) + + return masks, iou_pred + + +# Lightly adapted from +# https://github.com/facebookresearch/MaskFormer/blob/main/mask_former/modeling/transformer/transformer_predictor.py # noqa +class MLP(nn.Module): + def __init__( + self, + input_dim: int, + hidden_dim: int, + output_dim: int, + num_layers: int, + sigmoid_output: bool = False, + ) -> None: + super().__init__() + self.num_layers = num_layers + h = [hidden_dim] * (num_layers - 1) + self.layers = nn.ModuleList( + nn.Linear(n, k) for n, k in zip([input_dim] + h, h + [output_dim]) + ) + self.sigmoid_output = sigmoid_output + + def forward(self, x): + for i, layer in enumerate(self.layers): + x = F.relu(layer(x)) if i < self.num_layers - 1 else layer(x) + if self.sigmoid_output: + x = F.sigmoid(x) + return x diff --git a/segment_anything/modeling/mask_decoder_hq.py b/segment_anything/modeling/mask_decoder_hq.py new file mode 100644 index 0000000..1e365e3 --- /dev/null +++ b/segment_anything/modeling/mask_decoder_hq.py @@ -0,0 +1,232 @@ +# Copyright (c) Meta Platforms, Inc. and affiliates. +# Modified by HQ-SAM team +# All rights reserved. + +# This source code is licensed under the license found in the +# LICENSE file in the root directory of this source tree. + +import torch +from torch import nn +from torch.nn import functional as F + +from typing import List, Tuple, Type + +from .common import LayerNorm2d + + +class MaskDecoderHQ(nn.Module): + def __init__( + self, + *, + transformer_dim: int, + transformer: nn.Module, + num_multimask_outputs: int = 3, + activation: Type[nn.Module] = nn.GELU, + iou_head_depth: int = 3, + iou_head_hidden_dim: int = 256, + vit_dim: int = 1024, + ) -> None: + """ + Predicts masks given an image and prompt embeddings, using a + transformer architecture. 
+ + Arguments: + transformer_dim (int): the channel dimension of the transformer + transformer (nn.Module): the transformer used to predict masks + num_multimask_outputs (int): the number of masks to predict + when disambiguating masks + activation (nn.Module): the type of activation to use when + upscaling masks + iou_head_depth (int): the depth of the MLP used to predict + mask quality + iou_head_hidden_dim (int): the hidden dimension of the MLP + used to predict mask quality + """ + super().__init__() + self.transformer_dim = transformer_dim + self.transformer = transformer + + self.num_multimask_outputs = num_multimask_outputs + + self.iou_token = nn.Embedding(1, transformer_dim) + self.num_mask_tokens = num_multimask_outputs + 1 + self.mask_tokens = nn.Embedding(self.num_mask_tokens, transformer_dim) + + self.output_upscaling = nn.Sequential( + nn.ConvTranspose2d(transformer_dim, transformer_dim // 4, kernel_size=2, stride=2), + LayerNorm2d(transformer_dim // 4), + activation(), + nn.ConvTranspose2d(transformer_dim // 4, transformer_dim // 8, kernel_size=2, stride=2), + activation(), + ) + self.output_hypernetworks_mlps = nn.ModuleList( + [ + MLP(transformer_dim, transformer_dim, transformer_dim // 8, 3) + for i in range(self.num_mask_tokens) + ] + ) + + self.iou_prediction_head = MLP( + transformer_dim, iou_head_hidden_dim, self.num_mask_tokens, iou_head_depth + ) + + # HQ-SAM parameters + self.hf_token = nn.Embedding(1, transformer_dim) # HQ-Ouptput-Token + self.hf_mlp = MLP(transformer_dim, transformer_dim, transformer_dim // 8, 3) # corresponding new MLP layer for HQ-Ouptput-Token + self.num_mask_tokens = self.num_mask_tokens + 1 + + # three conv fusion layers for obtaining HQ-Feature + self.compress_vit_feat = nn.Sequential( + nn.ConvTranspose2d(vit_dim, transformer_dim, kernel_size=2, stride=2), + LayerNorm2d(transformer_dim), + nn.GELU(), + nn.ConvTranspose2d(transformer_dim, transformer_dim // 8, kernel_size=2, stride=2)) + + self.embedding_encoder = nn.Sequential( + nn.ConvTranspose2d(transformer_dim, transformer_dim // 4, kernel_size=2, stride=2), + LayerNorm2d(transformer_dim // 4), + nn.GELU(), + nn.ConvTranspose2d(transformer_dim // 4, transformer_dim // 8, kernel_size=2, stride=2), + ) + self.embedding_maskfeature = nn.Sequential( + nn.Conv2d(transformer_dim // 8, transformer_dim // 4, 3, 1, 1), + LayerNorm2d(transformer_dim // 4), + nn.GELU(), + nn.Conv2d(transformer_dim // 4, transformer_dim // 8, 3, 1, 1)) + + + + def forward( + self, + image_embeddings: torch.Tensor, + image_pe: torch.Tensor, + sparse_prompt_embeddings: torch.Tensor, + dense_prompt_embeddings: torch.Tensor, + multimask_output: bool, + hq_token_only: bool, + interm_embeddings: torch.Tensor, + ) -> Tuple[torch.Tensor, torch.Tensor]: + """ + Predict masks given image and prompt embeddings. + + Arguments: + image_embeddings (torch.Tensor): the embeddings from the ViT image encoder + image_pe (torch.Tensor): positional encoding with the shape of image_embeddings + sparse_prompt_embeddings (torch.Tensor): the embeddings of the points and boxes + dense_prompt_embeddings (torch.Tensor): the embeddings of the mask inputs + multimask_output (bool): Whether to return multiple masks or a single + mask. 
+ + Returns: + torch.Tensor: batched predicted masks + torch.Tensor: batched predictions of mask quality + """ + vit_features = interm_embeddings[0].permute(0, 3, 1, 2) # early-layer ViT feature, after 1st global attention block in ViT + hq_features = self.embedding_encoder(image_embeddings) + self.compress_vit_feat(vit_features) + + masks, iou_pred = self.predict_masks( + image_embeddings=image_embeddings, + image_pe=image_pe, + sparse_prompt_embeddings=sparse_prompt_embeddings, + dense_prompt_embeddings=dense_prompt_embeddings, + hq_features=hq_features, + ) + + # Select the correct mask or masks for output + if multimask_output: + # mask with highest score + mask_slice = slice(1,self.num_mask_tokens-1) + iou_pred = iou_pred[:, mask_slice] + iou_pred, max_iou_idx = torch.max(iou_pred,dim=1) + iou_pred = iou_pred.unsqueeze(1) + masks_multi = masks[:, mask_slice, :, :] + masks_sam = masks_multi[torch.arange(masks_multi.size(0)),max_iou_idx].unsqueeze(1) + else: + # singale mask output, default + mask_slice = slice(0, 1) + iou_pred = iou_pred[:,mask_slice] + masks_sam = masks[:,mask_slice] + + masks_hq = masks[:,slice(self.num_mask_tokens-1, self.num_mask_tokens)] + if hq_token_only: + masks = masks_hq + else: + masks = masks_sam + masks_hq + # Prepare output + return masks, iou_pred + + def predict_masks( + self, + image_embeddings: torch.Tensor, + image_pe: torch.Tensor, + sparse_prompt_embeddings: torch.Tensor, + dense_prompt_embeddings: torch.Tensor, + hq_features: torch.Tensor, + ) -> Tuple[torch.Tensor, torch.Tensor]: + """Predicts masks. See 'forward' for more details.""" + # Concatenate output tokens + output_tokens = torch.cat([self.iou_token.weight, self.mask_tokens.weight, self.hf_token.weight], dim=0) + output_tokens = output_tokens.unsqueeze(0).expand(sparse_prompt_embeddings.size(0), -1, -1) + tokens = torch.cat((output_tokens, sparse_prompt_embeddings), dim=1) + + # Expand per-image data in batch direction to be per-mask + src = torch.repeat_interleave(image_embeddings, tokens.shape[0], dim=0) + src = src + dense_prompt_embeddings + pos_src = torch.repeat_interleave(image_pe, tokens.shape[0], dim=0) + b, c, h, w = src.shape + + # Run the transformer + hs, src = self.transformer(src, pos_src, tokens) + iou_token_out = hs[:, 0, :] + mask_tokens_out = hs[:, 1 : (1 + self.num_mask_tokens), :] + + # Upscale mask embeddings and predict masks using the mask tokens + src = src.transpose(1, 2).view(b, c, h, w) + + upscaled_embedding_sam = self.output_upscaling(src) + upscaled_embedding_hq = self.embedding_maskfeature(upscaled_embedding_sam) + hq_features.repeat(b,1,1,1) + + hyper_in_list: List[torch.Tensor] = [] + for i in range(self.num_mask_tokens): + if i < self.num_mask_tokens - 1: + hyper_in_list.append(self.output_hypernetworks_mlps[i](mask_tokens_out[:, i, :])) + else: + hyper_in_list.append(self.hf_mlp(mask_tokens_out[:, i, :])) + + hyper_in = torch.stack(hyper_in_list, dim=1) + b, c, h, w = upscaled_embedding_sam.shape + + masks_sam = (hyper_in[:,:self.num_mask_tokens-1] @ upscaled_embedding_sam.view(b, c, h * w)).view(b, -1, h, w) + masks_sam_hq = (hyper_in[:,self.num_mask_tokens-1:] @ upscaled_embedding_hq.view(b, c, h * w)).view(b, -1, h, w) + masks = torch.cat([masks_sam,masks_sam_hq],dim=1) + # Generate mask quality predictions + iou_pred = self.iou_prediction_head(iou_token_out) + + return masks, iou_pred + + +# Lightly adapted from +# https://github.com/facebookresearch/MaskFormer/blob/main/mask_former/modeling/transformer/transformer_predictor.py # noqa +class 
MLP(nn.Module): + def __init__( + self, + input_dim: int, + hidden_dim: int, + output_dim: int, + num_layers: int, + sigmoid_output: bool = False, + ) -> None: + super().__init__() + self.num_layers = num_layers + h = [hidden_dim] * (num_layers - 1) + self.layers = nn.ModuleList( + nn.Linear(n, k) for n, k in zip([input_dim] + h, h + [output_dim]) + ) + self.sigmoid_output = sigmoid_output + + def forward(self, x): + for i, layer in enumerate(self.layers): + x = F.relu(layer(x)) if i < self.num_layers - 1 else layer(x) + if self.sigmoid_output: + x = F.sigmoid(x) + return x diff --git a/segment_anything/modeling/prompt_encoder.py b/segment_anything/modeling/prompt_encoder.py new file mode 100644 index 0000000..c3143f4 --- /dev/null +++ b/segment_anything/modeling/prompt_encoder.py @@ -0,0 +1,214 @@ +# Copyright (c) Meta Platforms, Inc. and affiliates. +# All rights reserved. + +# This source code is licensed under the license found in the +# LICENSE file in the root directory of this source tree. + +import numpy as np +import torch +from torch import nn + +from typing import Any, Optional, Tuple, Type + +from .common import LayerNorm2d + + +class PromptEncoder(nn.Module): + def __init__( + self, + embed_dim: int, + image_embedding_size: Tuple[int, int], + input_image_size: Tuple[int, int], + mask_in_chans: int, + activation: Type[nn.Module] = nn.GELU, + ) -> None: + """ + Encodes prompts for input to SAM's mask decoder. + + Arguments: + embed_dim (int): The prompts' embedding dimension + image_embedding_size (tuple(int, int)): The spatial size of the + image embedding, as (H, W). + input_image_size (int): The padded size of the image as input + to the image encoder, as (H, W). + mask_in_chans (int): The number of hidden channels used for + encoding input masks. + activation (nn.Module): The activation to use when encoding + input masks. + """ + super().__init__() + self.embed_dim = embed_dim + self.input_image_size = input_image_size + self.image_embedding_size = image_embedding_size + self.pe_layer = PositionEmbeddingRandom(embed_dim // 2) + + self.num_point_embeddings: int = 4 # pos/neg point + 2 box corners + point_embeddings = [nn.Embedding(1, embed_dim) for i in range(self.num_point_embeddings)] + self.point_embeddings = nn.ModuleList(point_embeddings) + self.not_a_point_embed = nn.Embedding(1, embed_dim) + + self.mask_input_size = (4 * image_embedding_size[0], 4 * image_embedding_size[1]) + self.mask_downscaling = nn.Sequential( + nn.Conv2d(1, mask_in_chans // 4, kernel_size=2, stride=2), + LayerNorm2d(mask_in_chans // 4), + activation(), + nn.Conv2d(mask_in_chans // 4, mask_in_chans, kernel_size=2, stride=2), + LayerNorm2d(mask_in_chans), + activation(), + nn.Conv2d(mask_in_chans, embed_dim, kernel_size=1), + ) + self.no_mask_embed = nn.Embedding(1, embed_dim) + + def get_dense_pe(self) -> torch.Tensor: + """ + Returns the positional encoding used to encode point prompts, + applied to a dense set of points the shape of the image encoding. 
+ + Returns: + torch.Tensor: Positional encoding with shape + 1x(embed_dim)x(embedding_h)x(embedding_w) + """ + return self.pe_layer(self.image_embedding_size).unsqueeze(0) + + def _embed_points( + self, + points: torch.Tensor, + labels: torch.Tensor, + pad: bool, + ) -> torch.Tensor: + """Embeds point prompts.""" + points = points + 0.5 # Shift to center of pixel + if pad: + padding_point = torch.zeros((points.shape[0], 1, 2), device=points.device) + padding_label = -torch.ones((labels.shape[0], 1), device=labels.device) + points = torch.cat([points, padding_point], dim=1) + labels = torch.cat([labels, padding_label], dim=1) + point_embedding = self.pe_layer.forward_with_coords(points, self.input_image_size) + point_embedding[labels == -1] = 0.0 + point_embedding[labels == -1] += self.not_a_point_embed.weight + point_embedding[labels == 0] += self.point_embeddings[0].weight + point_embedding[labels == 1] += self.point_embeddings[1].weight + return point_embedding + + def _embed_boxes(self, boxes: torch.Tensor) -> torch.Tensor: + """Embeds box prompts.""" + boxes = boxes + 0.5 # Shift to center of pixel + coords = boxes.reshape(-1, 2, 2) + corner_embedding = self.pe_layer.forward_with_coords(coords, self.input_image_size) + corner_embedding[:, 0, :] += self.point_embeddings[2].weight + corner_embedding[:, 1, :] += self.point_embeddings[3].weight + return corner_embedding + + def _embed_masks(self, masks: torch.Tensor) -> torch.Tensor: + """Embeds mask inputs.""" + mask_embedding = self.mask_downscaling(masks) + return mask_embedding + + def _get_batch_size( + self, + points: Optional[Tuple[torch.Tensor, torch.Tensor]], + boxes: Optional[torch.Tensor], + masks: Optional[torch.Tensor], + ) -> int: + """ + Gets the batch size of the output given the batch size of the input prompts. + """ + if points is not None: + return points[0].shape[0] + elif boxes is not None: + return boxes.shape[0] + elif masks is not None: + return masks.shape[0] + else: + return 1 + + def _get_device(self) -> torch.device: + return self.point_embeddings[0].weight.device + + def forward( + self, + points: Optional[Tuple[torch.Tensor, torch.Tensor]], + boxes: Optional[torch.Tensor], + masks: Optional[torch.Tensor], + ) -> Tuple[torch.Tensor, torch.Tensor]: + """ + Embeds different types of prompts, returning both sparse and dense + embeddings. + + Arguments: + points (tuple(torch.Tensor, torch.Tensor) or none): point coordinates + and labels to embed. + boxes (torch.Tensor or none): boxes to embed + masks (torch.Tensor or none): masks to embed + + Returns: + torch.Tensor: sparse embeddings for the points and boxes, with shape + BxNx(embed_dim), where N is determined by the number of input points + and boxes. 
+ torch.Tensor: dense embeddings for the masks, in the shape + Bx(embed_dim)x(embed_H)x(embed_W) + """ + bs = self._get_batch_size(points, boxes, masks) + sparse_embeddings = torch.empty((bs, 0, self.embed_dim), device=self._get_device()) + if points is not None: + coords, labels = points + point_embeddings = self._embed_points(coords, labels, pad=(boxes is None)) + sparse_embeddings = torch.cat([sparse_embeddings, point_embeddings], dim=1) + if boxes is not None: + box_embeddings = self._embed_boxes(boxes) + sparse_embeddings = torch.cat([sparse_embeddings, box_embeddings], dim=1) + + if masks is not None: + dense_embeddings = self._embed_masks(masks) + else: + dense_embeddings = self.no_mask_embed.weight.reshape(1, -1, 1, 1).expand( + bs, -1, self.image_embedding_size[0], self.image_embedding_size[1] + ) + + return sparse_embeddings, dense_embeddings + + +class PositionEmbeddingRandom(nn.Module): + """ + Positional encoding using random spatial frequencies. + """ + + def __init__(self, num_pos_feats: int = 64, scale: Optional[float] = None) -> None: + super().__init__() + if scale is None or scale <= 0.0: + scale = 1.0 + self.register_buffer( + "positional_encoding_gaussian_matrix", + scale * torch.randn((2, num_pos_feats)), + ) + + def _pe_encoding(self, coords: torch.Tensor) -> torch.Tensor: + """Positionally encode points that are normalized to [0,1].""" + # assuming coords are in [0, 1]^2 square and have d_1 x ... x d_n x 2 shape + coords = 2 * coords - 1 + coords = coords @ self.positional_encoding_gaussian_matrix + coords = 2 * np.pi * coords + # outputs d_1 x ... x d_n x C shape + return torch.cat([torch.sin(coords), torch.cos(coords)], dim=-1) + + def forward(self, size: Tuple[int, int]) -> torch.Tensor: + """Generate positional encoding for a grid of the specified size.""" + h, w = size + device: Any = self.positional_encoding_gaussian_matrix.device + grid = torch.ones((h, w), device=device, dtype=torch.float32) + y_embed = grid.cumsum(dim=0) - 0.5 + x_embed = grid.cumsum(dim=1) - 0.5 + y_embed = y_embed / h + x_embed = x_embed / w + + pe = self._pe_encoding(torch.stack([x_embed, y_embed], dim=-1)) + return pe.permute(2, 0, 1) # C x H x W + + def forward_with_coords( + self, coords_input: torch.Tensor, image_size: Tuple[int, int] + ) -> torch.Tensor: + """Positionally encode points that are not normalized to [0,1].""" + coords = coords_input.clone() + coords[:, :, 0] = coords[:, :, 0] / image_size[1] + coords[:, :, 1] = coords[:, :, 1] / image_size[0] + return self._pe_encoding(coords.to(torch.float)) # B x N x C diff --git a/segment_anything/modeling/sam.py b/segment_anything/modeling/sam.py new file mode 100644 index 0000000..b928dfd --- /dev/null +++ b/segment_anything/modeling/sam.py @@ -0,0 +1,177 @@ +# Copyright (c) Meta Platforms, Inc. and affiliates. +# All rights reserved. + +# This source code is licensed under the license found in the +# LICENSE file in the root directory of this source tree. 
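The builders in build_sam.py return the Sam module defined below, which is normally driven through SamPredictor rather than called directly. A minimal end-to-end sketch of that flow, assuming a SAM-HQ checkpoint has been downloaded locally (the checkpoint path, device choice, and stand-in image are placeholders, not part of this patch):

import numpy as np
import torch

from segment_anything.build_sam import sam_model_registry
from segment_anything.predictor import SamPredictor

# "vit_h" / "vit_l" / "vit_b" select the backbone; the .pth path is a placeholder.
sam = sam_model_registry["vit_b"](checkpoint="sam_hq_vit_b.pth")
sam.to("cuda" if torch.cuda.is_available() else "cpu")
sam.eval()

predictor = SamPredictor(sam)

# Any HxWx3 uint8 RGB array works here; a real image would be loaded instead.
image = np.zeros((768, 1024, 3), dtype=np.uint8)
predictor.set_image(image)

# One foreground click at (x, y). hq_token_only=True returns the pure HQ mask;
# otherwise the HQ correction is added to the standard SAM mask
# (see MaskDecoderHQ.forward).
masks, scores, low_res_logits = predictor.predict(
    point_coords=np.array([[512, 384]]),
    point_labels=np.array([1]),
    multimask_output=False,
    hq_token_only=False,
)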
+ +import torch +from torch import nn +from torch.nn import functional as F + +from typing import Any, Dict, List, Tuple + +from .image_encoder import ImageEncoderViT +from .mask_decoder import MaskDecoder +from .prompt_encoder import PromptEncoder + + +class Sam(nn.Module): + mask_threshold: float = 0.0 + image_format: str = "RGB" + + def __init__( + self, + image_encoder: ImageEncoderViT, + prompt_encoder: PromptEncoder, + mask_decoder: MaskDecoder, + pixel_mean: List[float] = [123.675, 116.28, 103.53], + pixel_std: List[float] = [58.395, 57.12, 57.375], + ) -> None: + """ + SAM predicts object masks from an image and input prompts. + + Arguments: + image_encoder (ImageEncoderViT): The backbone used to encode the + image into image embeddings that allow for efficient mask prediction. + prompt_encoder (PromptEncoder): Encodes various types of input prompts. + mask_decoder (MaskDecoder): Predicts masks from the image embeddings + and encoded prompts. + pixel_mean (list(float)): Mean values for normalizing pixels in the input image. + pixel_std (list(float)): Std values for normalizing pixels in the input image. + """ + super().__init__() + self.image_encoder = image_encoder + self.prompt_encoder = prompt_encoder + self.mask_decoder = mask_decoder + self.register_buffer("pixel_mean", torch.Tensor(pixel_mean).view(-1, 1, 1), False) + self.register_buffer("pixel_std", torch.Tensor(pixel_std).view(-1, 1, 1), False) + + @property + def device(self) -> Any: + return self.pixel_mean.device + + def forward( + self, + batched_input: List[Dict[str, Any]], + multimask_output: bool, + hq_token_only: bool =False, + ) -> List[Dict[str, torch.Tensor]]: + """ + Predicts masks end-to-end from provided images and prompts. + If prompts are not known in advance, using SamPredictor is + recommended over calling the model directly. + + Arguments: + batched_input (list(dict)): A list over input images, each a + dictionary with the following keys. A prompt key can be + excluded if it is not present. + 'image': The image as a torch tensor in 3xHxW format, + already transformed for input to the model. + 'original_size': (tuple(int, int)) The original size of + the image before transformation, as (H, W). + 'point_coords': (torch.Tensor) Batched point prompts for + this image, with shape BxNx2. Already transformed to the + input frame of the model. + 'point_labels': (torch.Tensor) Batched labels for point prompts, + with shape BxN. + 'boxes': (torch.Tensor) Batched box inputs, with shape Bx4. + Already transformed to the input frame of the model. + 'mask_inputs': (torch.Tensor) Batched mask inputs to the model, + in the form Bx1xHxW. + multimask_output (bool): Whether the model should predict multiple + disambiguating masks, or return a single mask. + + Returns: + (list(dict)): A list over input images, where each element is + as dictionary with the following keys. + 'masks': (torch.Tensor) Batched binary mask predictions, + with shape BxCxHxW, where B is the number of input prompts, + C is determined by multimask_output, and (H, W) is the + original size of the image. + 'iou_predictions': (torch.Tensor) The model's predictions + of mask quality, in shape BxC. + 'low_res_logits': (torch.Tensor) Low resolution logits with + shape BxCxHxW, where H=W=256. Can be passed as mask input + to subsequent iterations of prediction. 
+ """ + input_images = torch.stack([self.preprocess(x["image"]) for x in batched_input], dim=0) + image_embeddings, interm_embeddings = self.image_encoder(input_images) + interm_embeddings = interm_embeddings[0] # early layer + + outputs = [] + for image_record, curr_embedding, curr_interm in zip(batched_input, image_embeddings, interm_embeddings): + if "point_coords" in image_record: + points = (image_record["point_coords"], image_record["point_labels"]) + else: + points = None + sparse_embeddings, dense_embeddings = self.prompt_encoder( + points=points, + boxes=image_record.get("boxes", None), + masks=image_record.get("mask_inputs", None), + ) + low_res_masks, iou_predictions = self.mask_decoder( + image_embeddings=curr_embedding.unsqueeze(0), + image_pe=self.prompt_encoder.get_dense_pe(), + sparse_prompt_embeddings=sparse_embeddings, + dense_prompt_embeddings=dense_embeddings, + multimask_output=multimask_output, + hq_token_only=hq_token_only, + interm_embeddings=curr_interm.unsqueeze(0).unsqueeze(0), + ) + masks = self.postprocess_masks( + low_res_masks, + input_size=image_record["image"].shape[-2:], + original_size=image_record["original_size"], + ) + masks = masks > self.mask_threshold + outputs.append( + { + "masks": masks, + "iou_predictions": iou_predictions, + "low_res_logits": low_res_masks, + } + ) + return outputs + + def postprocess_masks( + self, + masks: torch.Tensor, + input_size: Tuple[int, ...], + original_size: Tuple[int, ...], + ) -> torch.Tensor: + """ + Remove padding and upscale masks to the original image size. + + Arguments: + masks (torch.Tensor): Batched masks from the mask_decoder, + in BxCxHxW format. + input_size (tuple(int, int)): The size of the image input to the + model, in (H, W) format. Used to remove padding. + original_size (tuple(int, int)): The original size of the image + before resizing for input to the model, in (H, W) format. + + Returns: + (torch.Tensor): Batched masks in BxCxHxW format, where (H, W) + is given by original_size. + """ + masks = F.interpolate( + masks, + (self.image_encoder.img_size, self.image_encoder.img_size), + mode="bilinear", + align_corners=False, + ) + masks = masks[..., : input_size[0], : input_size[1]] + masks = F.interpolate(masks, original_size, mode="bilinear", align_corners=False) + return masks + + def preprocess(self, x: torch.Tensor) -> torch.Tensor: + """Normalize pixel values and pad to a square input.""" + # Normalize colors + x = (x - self.pixel_mean) / self.pixel_std + + # Pad + h, w = x.shape[-2:] + padh = self.image_encoder.img_size - h + padw = self.image_encoder.img_size - w + x = F.pad(x, (0, padw, 0, padh)) + return x diff --git a/segment_anything/modeling/transformer.py b/segment_anything/modeling/transformer.py new file mode 100644 index 0000000..28fafea --- /dev/null +++ b/segment_anything/modeling/transformer.py @@ -0,0 +1,240 @@ +# Copyright (c) Meta Platforms, Inc. and affiliates. +# All rights reserved. + +# This source code is licensed under the license found in the +# LICENSE file in the root directory of this source tree. + +import torch +from torch import Tensor, nn + +import math +from typing import Tuple, Type + +from .common import MLPBlock + + +class TwoWayTransformer(nn.Module): + def __init__( + self, + depth: int, + embedding_dim: int, + num_heads: int, + mlp_dim: int, + activation: Type[nn.Module] = nn.ReLU, + attention_downsample_rate: int = 2, + ) -> None: + """ + A transformer decoder that attends to an input image using + queries whose positional embedding is supplied. 
+ + Args: + depth (int): number of layers in the transformer + embedding_dim (int): the channel dimension for the input embeddings + num_heads (int): the number of heads for multihead attention. Must + divide embedding_dim + mlp_dim (int): the channel dimension internal to the MLP block + activation (nn.Module): the activation to use in the MLP block + """ + super().__init__() + self.depth = depth + self.embedding_dim = embedding_dim + self.num_heads = num_heads + self.mlp_dim = mlp_dim + self.layers = nn.ModuleList() + + for i in range(depth): + self.layers.append( + TwoWayAttentionBlock( + embedding_dim=embedding_dim, + num_heads=num_heads, + mlp_dim=mlp_dim, + activation=activation, + attention_downsample_rate=attention_downsample_rate, + skip_first_layer_pe=(i == 0), + ) + ) + + self.final_attn_token_to_image = Attention( + embedding_dim, num_heads, downsample_rate=attention_downsample_rate + ) + self.norm_final_attn = nn.LayerNorm(embedding_dim) + + def forward( + self, + image_embedding: Tensor, + image_pe: Tensor, + point_embedding: Tensor, + ) -> Tuple[Tensor, Tensor]: + """ + Args: + image_embedding (torch.Tensor): image to attend to. Should be shape + B x embedding_dim x h x w for any h and w. + image_pe (torch.Tensor): the positional encoding to add to the image. Must + have the same shape as image_embedding. + point_embedding (torch.Tensor): the embedding to add to the query points. + Must have shape B x N_points x embedding_dim for any N_points. + + Returns: + torch.Tensor: the processed point_embedding + torch.Tensor: the processed image_embedding + """ + # BxCxHxW -> BxHWxC == B x N_image_tokens x C + bs, c, h, w = image_embedding.shape + image_embedding = image_embedding.flatten(2).permute(0, 2, 1) + image_pe = image_pe.flatten(2).permute(0, 2, 1) + + # Prepare queries + queries = point_embedding + keys = image_embedding + + # Apply transformer blocks and final layernorm + for layer in self.layers: + queries, keys = layer( + queries=queries, + keys=keys, + query_pe=point_embedding, + key_pe=image_pe, + ) + + # Apply the final attention layer from the points to the image + q = queries + point_embedding + k = keys + image_pe + attn_out = self.final_attn_token_to_image(q=q, k=k, v=keys) + queries = queries + attn_out + queries = self.norm_final_attn(queries) + + return queries, keys + + +class TwoWayAttentionBlock(nn.Module): + def __init__( + self, + embedding_dim: int, + num_heads: int, + mlp_dim: int = 2048, + activation: Type[nn.Module] = nn.ReLU, + attention_downsample_rate: int = 2, + skip_first_layer_pe: bool = False, + ) -> None: + """ + A transformer block with four layers: (1) self-attention of sparse + inputs, (2) cross attention of sparse inputs to dense inputs, (3) mlp + block on sparse inputs, and (4) cross attention of dense inputs to sparse + inputs. 
+ + Arguments: + embedding_dim (int): the channel dimension of the embeddings + num_heads (int): the number of heads in the attention layers + mlp_dim (int): the hidden dimension of the mlp block + activation (nn.Module): the activation of the mlp block + skip_first_layer_pe (bool): skip the PE on the first layer + """ + super().__init__() + self.self_attn = Attention(embedding_dim, num_heads) + self.norm1 = nn.LayerNorm(embedding_dim) + + self.cross_attn_token_to_image = Attention( + embedding_dim, num_heads, downsample_rate=attention_downsample_rate + ) + self.norm2 = nn.LayerNorm(embedding_dim) + + self.mlp = MLPBlock(embedding_dim, mlp_dim, activation) + self.norm3 = nn.LayerNorm(embedding_dim) + + self.norm4 = nn.LayerNorm(embedding_dim) + self.cross_attn_image_to_token = Attention( + embedding_dim, num_heads, downsample_rate=attention_downsample_rate + ) + + self.skip_first_layer_pe = skip_first_layer_pe + + def forward( + self, queries: Tensor, keys: Tensor, query_pe: Tensor, key_pe: Tensor + ) -> Tuple[Tensor, Tensor]: + # Self attention block + if self.skip_first_layer_pe: + queries = self.self_attn(q=queries, k=queries, v=queries) + else: + q = queries + query_pe + attn_out = self.self_attn(q=q, k=q, v=queries) + queries = queries + attn_out + queries = self.norm1(queries) + + # Cross attention block, tokens attending to image embedding + q = queries + query_pe + k = keys + key_pe + attn_out = self.cross_attn_token_to_image(q=q, k=k, v=keys) + queries = queries + attn_out + queries = self.norm2(queries) + + # MLP block + mlp_out = self.mlp(queries) + queries = queries + mlp_out + queries = self.norm3(queries) + + # Cross attention block, image embedding attending to tokens + q = queries + query_pe + k = keys + key_pe + attn_out = self.cross_attn_image_to_token(q=k, k=q, v=queries) + keys = keys + attn_out + keys = self.norm4(keys) + + return queries, keys + + +class Attention(nn.Module): + """ + An attention layer that allows for downscaling the size of the embedding + after projection to queries, keys, and values. + """ + + def __init__( + self, + embedding_dim: int, + num_heads: int, + downsample_rate: int = 1, + ) -> None: + super().__init__() + self.embedding_dim = embedding_dim + self.internal_dim = embedding_dim // downsample_rate + self.num_heads = num_heads + assert self.internal_dim % num_heads == 0, "num_heads must divide embedding_dim." 
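        # In the two-way decoder the cross-attention layers are built with
        # downsample_rate=2, so with embedding_dim=256 and num_heads=8 the
        # projections below map 256 -> 128 (16 channels per head); the
        # self-attention layers keep downsample_rate=1.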
+ + self.q_proj = nn.Linear(embedding_dim, self.internal_dim) + self.k_proj = nn.Linear(embedding_dim, self.internal_dim) + self.v_proj = nn.Linear(embedding_dim, self.internal_dim) + self.out_proj = nn.Linear(self.internal_dim, embedding_dim) + + def _separate_heads(self, x: Tensor, num_heads: int) -> Tensor: + b, n, c = x.shape + x = x.reshape(b, n, num_heads, c // num_heads) + return x.transpose(1, 2) # B x N_heads x N_tokens x C_per_head + + def _recombine_heads(self, x: Tensor) -> Tensor: + b, n_heads, n_tokens, c_per_head = x.shape + x = x.transpose(1, 2) + return x.reshape(b, n_tokens, n_heads * c_per_head) # B x N_tokens x C + + def forward(self, q: Tensor, k: Tensor, v: Tensor) -> Tensor: + # Input projections + q = self.q_proj(q) + k = self.k_proj(k) + v = self.v_proj(v) + + # Separate into heads + q = self._separate_heads(q, self.num_heads) + k = self._separate_heads(k, self.num_heads) + v = self._separate_heads(v, self.num_heads) + + # Attention + _, _, _, c_per_head = q.shape + attn = q @ k.permute(0, 1, 3, 2) # B x N_heads x N_tokens x N_tokens + attn = attn / math.sqrt(c_per_head) + attn = torch.softmax(attn, dim=-1) + + # Get output + out = attn @ v + out = self._recombine_heads(out) + out = self.out_proj(out) + + return out diff --git a/segment_anything/predictor.py b/segment_anything/predictor.py new file mode 100644 index 0000000..31458fb --- /dev/null +++ b/segment_anything/predictor.py @@ -0,0 +1,276 @@ +# Copyright (c) Meta Platforms, Inc. and affiliates. +# All rights reserved. + +# This source code is licensed under the license found in the +# LICENSE file in the root directory of this source tree. + +import numpy as np +import torch + +from .modeling import Sam + +from typing import Optional, Tuple + +from .utils.transforms import ResizeLongestSide + + +class SamPredictor: + def __init__( + self, + sam_model: Sam, + ) -> None: + """ + Uses SAM to calculate the image embedding for an image, and then + allow repeated, efficient mask prediction given prompts. + + Arguments: + sam_model (Sam): The model to use for mask prediction. + """ + super().__init__() + self.model = sam_model + self.transform = ResizeLongestSide(sam_model.image_encoder.img_size) + self.reset_image() + + def set_image( + self, + image: np.ndarray, + image_format: str = "RGB", + ) -> None: + """ + Calculates the image embeddings for the provided image, allowing + masks to be predicted with the 'predict' method. + + Arguments: + image (np.ndarray): The image for calculating masks. Expects an + image in HWC uint8 format, with pixel values in [0, 255]. + image_format (str): The color format of the image, in ['RGB', 'BGR']. + """ + assert image_format in [ + "RGB", + "BGR", + ], f"image_format must be in ['RGB', 'BGR'], is {image_format}." + # import pdb;pdb.set_trace() + if image_format != self.model.image_format: + image = image[..., ::-1] + + # Transform the image to the form expected by the model + # import pdb;pdb.set_trace() + input_image = self.transform.apply_image(image) + input_image_torch = torch.as_tensor(input_image, device=self.device) + input_image_torch = input_image_torch.permute(2, 0, 1).contiguous()[None, :, :, :] + + self.set_torch_image(input_image_torch, image.shape[:2]) + + @torch.no_grad() + def set_torch_image( + self, + transformed_image: torch.Tensor, + original_image_size: Tuple[int, ...], + ) -> None: + """ + Calculates the image embeddings for the provided image, allowing + masks to be predicted with the 'predict' method. 
Expects the input + image to be already transformed to the format expected by the model. + + Arguments: + transformed_image (torch.Tensor): The input image, with shape + 1x3xHxW, which has been transformed with ResizeLongestSide. + original_image_size (tuple(int, int)): The size of the image + before transformation, in (H, W) format. + """ + assert ( + len(transformed_image.shape) == 4 + and transformed_image.shape[1] == 3 + and max(*transformed_image.shape[2:]) == self.model.image_encoder.img_size + ), f"set_torch_image input must be BCHW with long side {self.model.image_encoder.img_size}." + self.reset_image() + + self.original_size = original_image_size + self.input_size = tuple(transformed_image.shape[-2:]) + input_image = self.model.preprocess(transformed_image) + self.features, self.interm_features = self.model.image_encoder(input_image) + self.is_image_set = True + + def predict( + self, + point_coords: Optional[np.ndarray] = None, + point_labels: Optional[np.ndarray] = None, + box: Optional[np.ndarray] = None, + mask_input: Optional[np.ndarray] = None, + multimask_output: bool = True, + return_logits: bool = False, + hq_token_only: bool =False, + ) -> Tuple[np.ndarray, np.ndarray, np.ndarray]: + """ + Predict masks for the given input prompts, using the currently set image. + + Arguments: + point_coords (np.ndarray or None): A Nx2 array of point prompts to the + model. Each point is in (X,Y) in pixels. + point_labels (np.ndarray or None): A length N array of labels for the + point prompts. 1 indicates a foreground point and 0 indicates a + background point. + box (np.ndarray or None): A length 4 array given a box prompt to the + model, in XYXY format. + mask_input (np.ndarray): A low resolution mask input to the model, typically + coming from a previous prediction iteration. Has form 1xHxW, where + for SAM, H=W=256. + multimask_output (bool): If true, the model will return three masks. + For ambiguous input prompts (such as a single click), this will often + produce better masks than a single prediction. If only a single + mask is needed, the model's predicted quality score can be used + to select the best mask. For non-ambiguous prompts, such as multiple + input prompts, multimask_output=False can give better results. + return_logits (bool): If true, returns un-thresholded masks logits + instead of a binary mask. + + Returns: + (np.ndarray): The output masks in CxHxW format, where C is the + number of masks, and (H, W) is the original image size. + (np.ndarray): An array of length C containing the model's + predictions for the quality of each mask. + (np.ndarray): An array of shape CxHxW, where C is the number + of masks and H=W=256. These low resolution logits can be passed to + a subsequent iteration as mask input. + """ + if not self.is_image_set: + raise RuntimeError("An image must be set with .set_image(...) before mask prediction.") + + # Transform input prompts + coords_torch, labels_torch, box_torch, mask_input_torch = None, None, None, None + if point_coords is not None: + assert ( + point_labels is not None + ), "point_labels must be supplied if point_coords is supplied." 
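        # Prompts arrive in original-image pixel coordinates; apply_coords /
        # apply_boxes rescale them to the resized input frame (long side 1024
        # for the ViT backbones built above), and a batch dimension of 1 is
        # added before delegating to predict_torch.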
+ point_coords = self.transform.apply_coords(point_coords, self.original_size) + coords_torch = torch.as_tensor(point_coords, dtype=torch.float, device=self.device) + labels_torch = torch.as_tensor(point_labels, dtype=torch.int, device=self.device) + coords_torch, labels_torch = coords_torch[None, :, :], labels_torch[None, :] + if box is not None: + box = self.transform.apply_boxes(box, self.original_size) + box_torch = torch.as_tensor(box, dtype=torch.float, device=self.device) + box_torch = box_torch[None, :] + if mask_input is not None: + mask_input_torch = torch.as_tensor(mask_input, dtype=torch.float, device=self.device) + mask_input_torch = mask_input_torch[None, :, :, :] + + masks, iou_predictions, low_res_masks = self.predict_torch( + coords_torch, + labels_torch, + box_torch, + mask_input_torch, + multimask_output, + return_logits=return_logits, + hq_token_only=hq_token_only, + ) + + masks_np = masks[0].detach().cpu().numpy() + iou_predictions_np = iou_predictions[0].detach().cpu().numpy() + low_res_masks_np = low_res_masks[0].detach().cpu().numpy() + return masks_np, iou_predictions_np, low_res_masks_np + + @torch.no_grad() + def predict_torch( + self, + point_coords: Optional[torch.Tensor], + point_labels: Optional[torch.Tensor], + boxes: Optional[torch.Tensor] = None, + mask_input: Optional[torch.Tensor] = None, + multimask_output: bool = True, + return_logits: bool = False, + hq_token_only: bool =False, + ) -> Tuple[torch.Tensor, torch.Tensor, torch.Tensor]: + """ + Predict masks for the given input prompts, using the currently set image. + Input prompts are batched torch tensors and are expected to already be + transformed to the input frame using ResizeLongestSide. + + Arguments: + point_coords (torch.Tensor or None): A BxNx2 array of point prompts to the + model. Each point is in (X,Y) in pixels. + point_labels (torch.Tensor or None): A BxN array of labels for the + point prompts. 1 indicates a foreground point and 0 indicates a + background point. + boxes (np.ndarray or None): A Bx4 array given a box prompt to the + model, in XYXY format. + mask_input (np.ndarray): A low resolution mask input to the model, typically + coming from a previous prediction iteration. Has form Bx1xHxW, where + for SAM, H=W=256. Masks returned by a previous iteration of the + predict method do not need further transformation. + multimask_output (bool): If true, the model will return three masks. + For ambiguous input prompts (such as a single click), this will often + produce better masks than a single prediction. If only a single + mask is needed, the model's predicted quality score can be used + to select the best mask. For non-ambiguous prompts, such as multiple + input prompts, multimask_output=False can give better results. + return_logits (bool): If true, returns un-thresholded masks logits + instead of a binary mask. + + Returns: + (torch.Tensor): The output masks in BxCxHxW format, where C is the + number of masks, and (H, W) is the original image size. + (torch.Tensor): An array of shape BxC containing the model's + predictions for the quality of each mask. + (torch.Tensor): An array of shape BxCxHxW, where C is the number + of masks and H=W=256. These low res logits can be passed to + a subsequent iteration as mask input. + """ + if not self.is_image_set: + raise RuntimeError("An image must be set with .set_image(...) 
before mask prediction.") + + if point_coords is not None: + points = (point_coords, point_labels) + else: + points = None + + # Embed prompts + sparse_embeddings, dense_embeddings = self.model.prompt_encoder( + points=points, + boxes=boxes, + masks=mask_input, + ) + + # Predict masks + low_res_masks, iou_predictions = self.model.mask_decoder( + image_embeddings=self.features, + image_pe=self.model.prompt_encoder.get_dense_pe(), + sparse_prompt_embeddings=sparse_embeddings, + dense_prompt_embeddings=dense_embeddings, + multimask_output=multimask_output, + hq_token_only=hq_token_only, + interm_embeddings=self.interm_features, + ) + + # Upscale the masks to the original image resolution + masks = self.model.postprocess_masks(low_res_masks, self.input_size, self.original_size) + + if not return_logits: + masks = masks > self.model.mask_threshold + + return masks, iou_predictions, low_res_masks + + def get_image_embedding(self) -> torch.Tensor: + """ + Returns the image embeddings for the currently set image, with + shape 1xCxHxW, where C is the embedding dimension and (H,W) are + the embedding spatial dimension of SAM (typically C=256, H=W=64). + """ + if not self.is_image_set: + raise RuntimeError( + "An image must be set with .set_image(...) to generate an embedding." + ) + assert self.features is not None, "Features must exist if an image has been set." + return self.features + + @property + def device(self) -> torch.device: + return self.model.device + + def reset_image(self) -> None: + """Resets the currently set image.""" + self.is_image_set = False + self.features = None + self.orig_h = None + self.orig_w = None + self.input_h = None + self.input_w = None diff --git a/segment_anything/utils/__init__.py b/segment_anything/utils/__init__.py new file mode 100644 index 0000000..5277f46 --- /dev/null +++ b/segment_anything/utils/__init__.py @@ -0,0 +1,5 @@ +# Copyright (c) Meta Platforms, Inc. and affiliates. +# All rights reserved. + +# This source code is licensed under the license found in the +# LICENSE file in the root directory of this source tree. diff --git a/segment_anything/utils/amg.py b/segment_anything/utils/amg.py new file mode 100644 index 0000000..be06407 --- /dev/null +++ b/segment_anything/utils/amg.py @@ -0,0 +1,346 @@ +# Copyright (c) Meta Platforms, Inc. and affiliates. +# All rights reserved. + +# This source code is licensed under the license found in the +# LICENSE file in the root directory of this source tree. + +import numpy as np +import torch + +import math +from copy import deepcopy +from itertools import product +from typing import Any, Dict, Generator, ItemsView, List, Tuple + + +class MaskData: + """ + A structure for storing masks and their related data in batched format. + Implements basic filtering and concatenation. + """ + + def __init__(self, **kwargs) -> None: + for v in kwargs.values(): + assert isinstance( + v, (list, np.ndarray, torch.Tensor) + ), "MaskData only supports list, numpy arrays, and torch tensors." + self._stats = dict(**kwargs) + + def __setitem__(self, key: str, item: Any) -> None: + assert isinstance( + item, (list, np.ndarray, torch.Tensor) + ), "MaskData only supports list, numpy arrays, and torch tensors." 
+ self._stats[key] = item + + def __delitem__(self, key: str) -> None: + del self._stats[key] + + def __getitem__(self, key: str) -> Any: + return self._stats[key] + + def items(self) -> ItemsView[str, Any]: + return self._stats.items() + + def filter(self, keep: torch.Tensor) -> None: + for k, v in self._stats.items(): + if v is None: + self._stats[k] = None + elif isinstance(v, torch.Tensor): + self._stats[k] = v[torch.as_tensor(keep, device=v.device)] + elif isinstance(v, np.ndarray): + self._stats[k] = v[keep.detach().cpu().numpy()] + elif isinstance(v, list) and keep.dtype == torch.bool: + self._stats[k] = [a for i, a in enumerate(v) if keep[i]] + elif isinstance(v, list): + self._stats[k] = [v[i] for i in keep] + else: + raise TypeError(f"MaskData key {k} has an unsupported type {type(v)}.") + + def cat(self, new_stats: "MaskData") -> None: + for k, v in new_stats.items(): + if k not in self._stats or self._stats[k] is None: + self._stats[k] = deepcopy(v) + elif isinstance(v, torch.Tensor): + self._stats[k] = torch.cat([self._stats[k], v], dim=0) + elif isinstance(v, np.ndarray): + self._stats[k] = np.concatenate([self._stats[k], v], axis=0) + elif isinstance(v, list): + self._stats[k] = self._stats[k] + deepcopy(v) + else: + raise TypeError(f"MaskData key {k} has an unsupported type {type(v)}.") + + def to_numpy(self) -> None: + for k, v in self._stats.items(): + if isinstance(v, torch.Tensor): + self._stats[k] = v.detach().cpu().numpy() + + +def is_box_near_crop_edge( + boxes: torch.Tensor, crop_box: List[int], orig_box: List[int], atol: float = 20.0 +) -> torch.Tensor: + """Filter masks at the edge of a crop, but not at the edge of the original image.""" + crop_box_torch = torch.as_tensor(crop_box, dtype=torch.float, device=boxes.device) + orig_box_torch = torch.as_tensor(orig_box, dtype=torch.float, device=boxes.device) + boxes = uncrop_boxes_xyxy(boxes, crop_box).float() + near_crop_edge = torch.isclose(boxes, crop_box_torch[None, :], atol=atol, rtol=0) + near_image_edge = torch.isclose(boxes, orig_box_torch[None, :], atol=atol, rtol=0) + near_crop_edge = torch.logical_and(near_crop_edge, ~near_image_edge) + return torch.any(near_crop_edge, dim=1) + + +def box_xyxy_to_xywh(box_xyxy: torch.Tensor) -> torch.Tensor: + box_xywh = deepcopy(box_xyxy) + box_xywh[2] = box_xywh[2] - box_xywh[0] + box_xywh[3] = box_xywh[3] - box_xywh[1] + return box_xywh + + +def batch_iterator(batch_size: int, *args) -> Generator[List[Any], None, None]: + assert len(args) > 0 and all( + len(a) == len(args[0]) for a in args + ), "Batched iteration must have inputs of all the same size." + n_batches = len(args[0]) // batch_size + int(len(args[0]) % batch_size != 0) + for b in range(n_batches): + yield [arg[b * batch_size : (b + 1) * batch_size] for arg in args] + + +def mask_to_rle_pytorch(tensor: torch.Tensor) -> List[Dict[str, Any]]: + """ + Encodes masks to an uncompressed RLE, in the format expected by + pycoco tools. 
+ """ + # Put in fortran order and flatten h,w + b, h, w = tensor.shape + tensor = tensor.permute(0, 2, 1).flatten(1) + + # Compute change indices + diff = tensor[:, 1:] ^ tensor[:, :-1] + change_indices = diff.nonzero() + + # Encode run length + out = [] + for i in range(b): + cur_idxs = change_indices[change_indices[:, 0] == i, 1] + cur_idxs = torch.cat( + [ + torch.tensor([0], dtype=cur_idxs.dtype, device=cur_idxs.device), + cur_idxs + 1, + torch.tensor([h * w], dtype=cur_idxs.dtype, device=cur_idxs.device), + ] + ) + btw_idxs = cur_idxs[1:] - cur_idxs[:-1] + counts = [] if tensor[i, 0] == 0 else [0] + counts.extend(btw_idxs.detach().cpu().tolist()) + out.append({"size": [h, w], "counts": counts}) + return out + + +def rle_to_mask(rle: Dict[str, Any]) -> np.ndarray: + """Compute a binary mask from an uncompressed RLE.""" + h, w = rle["size"] + mask = np.empty(h * w, dtype=bool) + idx = 0 + parity = False + for count in rle["counts"]: + mask[idx : idx + count] = parity + idx += count + parity ^= True + mask = mask.reshape(w, h) + return mask.transpose() # Put in C order + + +def area_from_rle(rle: Dict[str, Any]) -> int: + return sum(rle["counts"][1::2]) + + +def calculate_stability_score( + masks: torch.Tensor, mask_threshold: float, threshold_offset: float +) -> torch.Tensor: + """ + Computes the stability score for a batch of masks. The stability + score is the IoU between the binary masks obtained by thresholding + the predicted mask logits at high and low values. + """ + # One mask is always contained inside the other. + # Save memory by preventing unnecessary cast to torch.int64 + intersections = ( + (masks > (mask_threshold + threshold_offset)) + .sum(-1, dtype=torch.int16) + .sum(-1, dtype=torch.int32) + ) + unions = ( + (masks > (mask_threshold - threshold_offset)) + .sum(-1, dtype=torch.int16) + .sum(-1, dtype=torch.int32) + ) + return intersections / unions + + +def build_point_grid(n_per_side: int) -> np.ndarray: + """Generates a 2D grid of points evenly spaced in [0,1]x[0,1].""" + offset = 1 / (2 * n_per_side) + points_one_side = np.linspace(offset, 1 - offset, n_per_side) + points_x = np.tile(points_one_side[None, :], (n_per_side, 1)) + points_y = np.tile(points_one_side[:, None], (1, n_per_side)) + points = np.stack([points_x, points_y], axis=-1).reshape(-1, 2) + return points + + +def build_all_layer_point_grids( + n_per_side: int, n_layers: int, scale_per_layer: int +) -> List[np.ndarray]: + """Generates point grids for all crop layers.""" + points_by_layer = [] + for i in range(n_layers + 1): + n_points = int(n_per_side / (scale_per_layer**i)) + points_by_layer.append(build_point_grid(n_points)) + return points_by_layer + + +def generate_crop_boxes( + im_size: Tuple[int, ...], n_layers: int, overlap_ratio: float +) -> Tuple[List[List[int]], List[int]]: + """ + Generates a list of crop boxes of different sizes. Each layer + has (2**i)**2 boxes for the ith layer. 
+ """ + crop_boxes, layer_idxs = [], [] + im_h, im_w = im_size + short_side = min(im_h, im_w) + + # Original image + crop_boxes.append([0, 0, im_w, im_h]) + layer_idxs.append(0) + + def crop_len(orig_len, n_crops, overlap): + return int(math.ceil((overlap * (n_crops - 1) + orig_len) / n_crops)) + + for i_layer in range(n_layers): + n_crops_per_side = 2 ** (i_layer + 1) + overlap = int(overlap_ratio * short_side * (2 / n_crops_per_side)) + + crop_w = crop_len(im_w, n_crops_per_side, overlap) + crop_h = crop_len(im_h, n_crops_per_side, overlap) + + crop_box_x0 = [int((crop_w - overlap) * i) for i in range(n_crops_per_side)] + crop_box_y0 = [int((crop_h - overlap) * i) for i in range(n_crops_per_side)] + + # Crops in XYWH format + for x0, y0 in product(crop_box_x0, crop_box_y0): + box = [x0, y0, min(x0 + crop_w, im_w), min(y0 + crop_h, im_h)] + crop_boxes.append(box) + layer_idxs.append(i_layer + 1) + + return crop_boxes, layer_idxs + + +def uncrop_boxes_xyxy(boxes: torch.Tensor, crop_box: List[int]) -> torch.Tensor: + x0, y0, _, _ = crop_box + offset = torch.tensor([[x0, y0, x0, y0]], device=boxes.device) + # Check if boxes has a channel dimension + if len(boxes.shape) == 3: + offset = offset.unsqueeze(1) + return boxes + offset + + +def uncrop_points(points: torch.Tensor, crop_box: List[int]) -> torch.Tensor: + x0, y0, _, _ = crop_box + offset = torch.tensor([[x0, y0]], device=points.device) + # Check if points has a channel dimension + if len(points.shape) == 3: + offset = offset.unsqueeze(1) + return points + offset + + +def uncrop_masks( + masks: torch.Tensor, crop_box: List[int], orig_h: int, orig_w: int +) -> torch.Tensor: + x0, y0, x1, y1 = crop_box + if x0 == 0 and y0 == 0 and x1 == orig_w and y1 == orig_h: + return masks + # Coordinate transform masks + pad_x, pad_y = orig_w - (x1 - x0), orig_h - (y1 - y0) + pad = (x0, pad_x - x0, y0, pad_y - y0) + return torch.nn.functional.pad(masks, pad, value=0) + + +def remove_small_regions( + mask: np.ndarray, area_thresh: float, mode: str +) -> Tuple[np.ndarray, bool]: + """ + Removes small disconnected regions and holes in a mask. Returns the + mask and an indicator of if the mask has been modified. + """ + import cv2 # type: ignore + + assert mode in ["holes", "islands"] + correct_holes = mode == "holes" + working_mask = (correct_holes ^ mask).astype(np.uint8) + n_labels, regions, stats, _ = cv2.connectedComponentsWithStats(working_mask, 8) + sizes = stats[:, -1][1:] # Row 0 is background label + small_regions = [i + 1 for i, s in enumerate(sizes) if s < area_thresh] + if len(small_regions) == 0: + return mask, False + fill_labels = [0] + small_regions + if not correct_holes: + fill_labels = [i for i in range(n_labels) if i not in fill_labels] + # If every region is below threshold, keep largest + if len(fill_labels) == 0: + fill_labels = [int(np.argmax(sizes)) + 1] + mask = np.isin(regions, fill_labels) + return mask, True + + +def coco_encode_rle(uncompressed_rle: Dict[str, Any]) -> Dict[str, Any]: + from pycocotools import mask as mask_utils # type: ignore + + h, w = uncompressed_rle["size"] + rle = mask_utils.frPyObjects(uncompressed_rle, h, w) + rle["counts"] = rle["counts"].decode("utf-8") # Necessary to serialize with json + return rle + + +def batched_mask_to_box(masks: torch.Tensor) -> torch.Tensor: + """ + Calculates boxes in XYXY format around masks. Return [0,0,0,0] for + an empty mask. For input shape C1xC2x...xHxW, the output shape is C1xC2x...x4. 
+ """ + # torch.max below raises an error on empty inputs, just skip in this case + if torch.numel(masks) == 0: + return torch.zeros(*masks.shape[:-2], 4, device=masks.device) + + # Normalize shape to CxHxW + shape = masks.shape + h, w = shape[-2:] + if len(shape) > 2: + masks = masks.flatten(0, -3) + else: + masks = masks.unsqueeze(0) + + # Get top and bottom edges + in_height, _ = torch.max(masks, dim=-1) + in_height_coords = in_height * torch.arange(h, device=in_height.device)[None, :] + bottom_edges, _ = torch.max(in_height_coords, dim=-1) + in_height_coords = in_height_coords + h * (~in_height) + top_edges, _ = torch.min(in_height_coords, dim=-1) + + # Get left and right edges + in_width, _ = torch.max(masks, dim=-2) + in_width_coords = in_width * torch.arange(w, device=in_width.device)[None, :] + right_edges, _ = torch.max(in_width_coords, dim=-1) + in_width_coords = in_width_coords + w * (~in_width) + left_edges, _ = torch.min(in_width_coords, dim=-1) + + # If the mask is empty the right edge will be to the left of the left edge. + # Replace these boxes with [0, 0, 0, 0] + empty_filter = (right_edges < left_edges) | (bottom_edges < top_edges) + out = torch.stack([left_edges, top_edges, right_edges, bottom_edges], dim=-1) + out = out * (~empty_filter).unsqueeze(-1) + + # Return to original shape + if len(shape) > 2: + out = out.reshape(*shape[:-2], 4) + else: + out = out[0] + + return out diff --git a/segment_anything/utils/onnx.py b/segment_anything/utils/onnx.py new file mode 100644 index 0000000..8013dc4 --- /dev/null +++ b/segment_anything/utils/onnx.py @@ -0,0 +1,155 @@ +# Copyright (c) Meta Platforms, Inc. and affiliates. +# All rights reserved. + +# This source code is licensed under the license found in the +# LICENSE file in the root directory of this source tree. + +import torch +import torch.nn as nn +from torch.nn import functional as F + +from typing import Tuple + +from ..modeling import Sam +from .amg import calculate_stability_score + + +class SamOnnxModel(nn.Module): + """ + This model should not be called directly, but is used in ONNX export. + It combines the prompt encoder, mask decoder, and mask postprocessing of Sam, + with some functions modified to enable model tracing. Also supports extra + options controlling what information. See the ONNX export script for details. 
+ """ + + def __init__( + self, + model: Sam, + hq_token_only: bool = False, + multimask_output: bool = False, + use_stability_score: bool = False, + return_extra_metrics: bool = False, + ) -> None: + super().__init__() + self.mask_decoder = model.mask_decoder + self.model = model + self.img_size = model.image_encoder.img_size + self.hq_token_only = hq_token_only + self.multimask_output = multimask_output + self.use_stability_score = use_stability_score + self.stability_score_offset = 1.0 + self.return_extra_metrics = return_extra_metrics + + @staticmethod + def resize_longest_image_size( + input_image_size: torch.Tensor, longest_side: int + ) -> torch.Tensor: + input_image_size = input_image_size.to(torch.float32) + scale = longest_side / torch.max(input_image_size) + transformed_size = scale * input_image_size + transformed_size = torch.floor(transformed_size + 0.5).to(torch.int64) + return transformed_size + + def _embed_points(self, point_coords: torch.Tensor, point_labels: torch.Tensor) -> torch.Tensor: + point_coords = point_coords + 0.5 + point_coords = point_coords / self.img_size + point_embedding = self.model.prompt_encoder.pe_layer._pe_encoding(point_coords) + point_labels = point_labels.unsqueeze(-1).expand_as(point_embedding) + + point_embedding = point_embedding * (point_labels != -1) + point_embedding = point_embedding + self.model.prompt_encoder.not_a_point_embed.weight * ( + point_labels == -1 + ) + + for i in range(self.model.prompt_encoder.num_point_embeddings): + point_embedding = point_embedding + self.model.prompt_encoder.point_embeddings[ + i + ].weight * (point_labels == i) + + return point_embedding + + def _embed_masks(self, input_mask: torch.Tensor, has_mask_input: torch.Tensor) -> torch.Tensor: + mask_embedding = has_mask_input * self.model.prompt_encoder.mask_downscaling(input_mask) + mask_embedding = mask_embedding + ( + 1 - has_mask_input + ) * self.model.prompt_encoder.no_mask_embed.weight.reshape(1, -1, 1, 1) + return mask_embedding + + def mask_postprocessing(self, masks: torch.Tensor, orig_im_size: torch.Tensor) -> torch.Tensor: + masks = F.interpolate( + masks, + size=(self.img_size, self.img_size), + mode="bilinear", + align_corners=False, + ) + + prepadded_size = self.resize_longest_image_size(orig_im_size, self.img_size).to(torch.int64) + masks = masks[..., : prepadded_size[0], : prepadded_size[1]] # type: ignore + + orig_im_size = orig_im_size.to(torch.int64) + h, w = orig_im_size[0], orig_im_size[1] + masks = F.interpolate(masks, size=(h, w), mode="bilinear", align_corners=False) + return masks + + + @torch.no_grad() + def forward( + self, + image_embeddings: torch.Tensor, + interm_embeddings: torch.Tensor, + point_coords: torch.Tensor, + point_labels: torch.Tensor, + mask_input: torch.Tensor, + has_mask_input: torch.Tensor, + orig_im_size: torch.Tensor, + ): + sparse_embedding = self._embed_points(point_coords, point_labels) + dense_embedding = self._embed_masks(mask_input, has_mask_input) + + vit_features = interm_embeddings[0].permute(0, 3, 1, 2) # early-layer ViT feature, after 1st global attention block in ViT + hq_features = self.model.mask_decoder.embedding_encoder(image_embeddings) + self.model.mask_decoder.compress_vit_feat(vit_features) + + masks, scores = self.model.mask_decoder.predict_masks( + image_embeddings=image_embeddings, + image_pe=self.model.prompt_encoder.get_dense_pe(), + sparse_prompt_embeddings=sparse_embedding, + dense_prompt_embeddings=dense_embedding, + hq_features=hq_features, + ) + + if self.use_stability_score: + 
scores = calculate_stability_score( + masks, self.model.mask_threshold, self.stability_score_offset + ) + + if self.multimask_output: + # mask with highest score + mask_slice = slice(1,self.model.mask_decoder.num_mask_tokens-1) + scores = scores[:, mask_slice] + scores, max_iou_idx = torch.max(scores,dim=1) + scores = scores.unsqueeze(1) + masks_multi = masks[:, mask_slice, :, :] + masks_sam = masks_multi[torch.arange(masks_multi.size(0)),max_iou_idx].unsqueeze(1) + else: + # singale mask output, default + mask_slice = slice(0, 1) + scores = scores[:,mask_slice] + masks_sam = masks[:,mask_slice] + + masks_hq = masks[:,slice(self.model.mask_decoder.num_mask_tokens-1, self.model.mask_decoder.num_mask_tokens)] + + if self.hq_token_only: + masks = masks_hq + else: + masks = masks_sam + masks_hq + + upscaled_masks = self.mask_postprocessing(masks, orig_im_size) + + if self.return_extra_metrics: + stability_scores = calculate_stability_score( + upscaled_masks, self.model.mask_threshold, self.stability_score_offset + ) + areas = (upscaled_masks > self.model.mask_threshold).sum(-1).sum(-1) + return upscaled_masks, scores, stability_scores, areas, masks + + return upscaled_masks, scores, masks diff --git a/segment_anything/utils/transforms.py b/segment_anything/utils/transforms.py new file mode 100644 index 0000000..c08ba1e --- /dev/null +++ b/segment_anything/utils/transforms.py @@ -0,0 +1,102 @@ +# Copyright (c) Meta Platforms, Inc. and affiliates. +# All rights reserved. + +# This source code is licensed under the license found in the +# LICENSE file in the root directory of this source tree. + +import numpy as np +import torch +from torch.nn import functional as F +from torchvision.transforms.functional import resize, to_pil_image # type: ignore + +from copy import deepcopy +from typing import Tuple + + +class ResizeLongestSide: + """ + Resizes images to the longest side 'target_length', as well as provides + methods for resizing coordinates and boxes. Provides methods for + transforming both numpy array and batched torch tensors. + """ + + def __init__(self, target_length: int) -> None: + self.target_length = target_length + + def apply_image(self, image: np.ndarray) -> np.ndarray: + """ + Expects a numpy array with shape HxWxC in uint8 format. + """ + target_size = self.get_preprocess_shape(image.shape[0], image.shape[1], self.target_length) + return np.array(resize(to_pil_image(image), target_size)) + + def apply_coords(self, coords: np.ndarray, original_size: Tuple[int, ...]) -> np.ndarray: + """ + Expects a numpy array of length 2 in the final dimension. Requires the + original image size in (H, W) format. + """ + old_h, old_w = original_size + new_h, new_w = self.get_preprocess_shape( + original_size[0], original_size[1], self.target_length + ) + coords = deepcopy(coords).astype(float) + coords[..., 0] = coords[..., 0] * (new_w / old_w) + coords[..., 1] = coords[..., 1] * (new_h / old_h) + return coords + + def apply_boxes(self, boxes: np.ndarray, original_size: Tuple[int, ...]) -> np.ndarray: + """ + Expects a numpy array shape Bx4. Requires the original image size + in (H, W) format. + """ + boxes = self.apply_coords(boxes.reshape(-1, 2, 2), original_size) + return boxes.reshape(-1, 4) + + def apply_image_torch(self, image: torch.Tensor) -> torch.Tensor: + """ + Expects batched images with shape BxCxHxW and float format. This + transformation may not exactly match apply_image. apply_image is + the transformation expected by the model. + """ + # Expects an image in BCHW format. 
May not exactly match apply_image. + target_size = self.get_preprocess_shape(image.shape[2], image.shape[3], self.target_length) + return F.interpolate( + image, target_size, mode="bilinear", align_corners=False, antialias=True + ) + + def apply_coords_torch( + self, coords: torch.Tensor, original_size: Tuple[int, ...] + ) -> torch.Tensor: + """ + Expects a torch tensor with length 2 in the last dimension. Requires the + original image size in (H, W) format. + """ + old_h, old_w = original_size + new_h, new_w = self.get_preprocess_shape( + original_size[0], original_size[1], self.target_length + ) + coords = deepcopy(coords).to(torch.float) + coords[..., 0] = coords[..., 0] * (new_w / old_w) + coords[..., 1] = coords[..., 1] * (new_h / old_h) + return coords + + def apply_boxes_torch( + self, boxes: torch.Tensor, original_size: Tuple[int, ...] + ) -> torch.Tensor: + """ + Expects a torch tensor with shape Bx4. Requires the original image + size in (H, W) format. + """ + boxes = self.apply_coords_torch(boxes.reshape(-1, 2, 2), original_size) + return boxes.reshape(-1, 4) + + @staticmethod + def get_preprocess_shape(oldh: int, oldw: int, long_side_length: int) -> Tuple[int, int]: + """ + Compute the output size given input size and target long side length. + """ + scale = long_side_length * 1.0 / max(oldh, oldw) + newh, neww = oldh * scale, oldw * scale + neww = int(neww + 0.5) + newh = int(newh + 0.5) + return (newh, neww) diff --git a/ui/MainWindow.py b/ui/MainWindow.py index 135f802..656819d 100644 --- a/ui/MainWindow.py +++ b/ui/MainWindow.py @@ -77,6 +77,19 @@ class Ui_MainWindow(object): font.setPointSize(12) self.menuEdit.setFont(font) self.menuEdit.setObjectName("menuEdit") + self.menuMode = QtWidgets.QMenu(self.menubar) + font = QtGui.QFont() + font.setFamily("Times New Roman") + font.setPointSize(12) + font.setBold(False) + font.setItalic(False) + font.setWeight(50) + self.menuMode.setFont(font) + self.menuMode.setObjectName("menuMode") + self.menuContour_mode = QtWidgets.QMenu(self.menuMode) + self.menuContour_mode.setObjectName("menuContour_mode") + self.menuSAM_model = QtWidgets.QMenu(self.menubar) + self.menuSAM_model.setObjectName("menuSAM_model") MainWindow.setMenuBar(self.menubar) self.statusbar = QtWidgets.QStatusBar(MainWindow) self.statusbar.setLayoutDirection(QtCore.Qt.LeftToRight) @@ -100,20 +113,28 @@ class Ui_MainWindow(object): self.dockWidgetContents_2.setObjectName("dockWidgetContents_2") self.info_dock.setWidget(self.dockWidgetContents_2) MainWindow.addDockWidget(QtCore.Qt.DockWidgetArea(2), self.info_dock) - self.labels_dock = QtWidgets.QDockWidget(MainWindow) - self.labels_dock.setMinimumSize(QtCore.QSize(85, 43)) - self.labels_dock.setFeatures(QtWidgets.QDockWidget.AllDockWidgetFeatures) - self.labels_dock.setObjectName("labels_dock") + self.annos_dock = QtWidgets.QDockWidget(MainWindow) + self.annos_dock.setMinimumSize(QtCore.QSize(85, 43)) + self.annos_dock.setFeatures(QtWidgets.QDockWidget.AllDockWidgetFeatures) + self.annos_dock.setObjectName("annos_dock") self.dockWidgetContents_3 = QtWidgets.QWidget() self.dockWidgetContents_3.setObjectName("dockWidgetContents_3") - self.labels_dock.setWidget(self.dockWidgetContents_3) - MainWindow.addDockWidget(QtCore.Qt.DockWidgetArea(2), self.labels_dock) + self.annos_dock.setWidget(self.dockWidgetContents_3) + MainWindow.addDockWidget(QtCore.Qt.DockWidgetArea(2), self.annos_dock) self.files_dock = QtWidgets.QDockWidget(MainWindow) self.files_dock.setObjectName("files_dock") self.dockWidgetContents = 
QtWidgets.QWidget() self.dockWidgetContents.setObjectName("dockWidgetContents") self.files_dock.setWidget(self.dockWidgetContents) MainWindow.addDockWidget(QtCore.Qt.DockWidgetArea(2), self.files_dock) + self.categories_dock = QtWidgets.QDockWidget(MainWindow) + self.categories_dock.setObjectName("categories_dock") + self.dockWidgetContents_4 = QtWidgets.QWidget() + self.dockWidgetContents_4.setObjectName("dockWidgetContents_4") + self.verticalLayout_2 = QtWidgets.QVBoxLayout(self.dockWidgetContents_4) + self.verticalLayout_2.setObjectName("verticalLayout_2") + self.categories_dock.setWidget(self.dockWidgetContents_4) + MainWindow.addDockWidget(QtCore.Qt.DockWidgetArea(1), self.categories_dock) self.actionOpen_dir = QtWidgets.QAction(MainWindow) icon2 = QtGui.QIcon() icon2.addPixmap(QtGui.QPixmap(":/icon/icons/照片_pic.svg"), QtGui.QIcon.Normal, QtGui.QIcon.Off) @@ -268,6 +289,27 @@ class Ui_MainWindow(object): icon26.addPixmap(QtGui.QPixmap(":/icon/icons/labelme_32x32.png"), QtGui.QIcon.Normal, QtGui.QIcon.Off) self.actionTo_LabelMe.setIcon(icon26) self.actionTo_LabelMe.setObjectName("actionTo_LabelMe") + self.actionContour_Max_only = QtWidgets.QAction(MainWindow) + self.actionContour_Max_only.setCheckable(True) + font = QtGui.QFont() + font.setFamily("Times New Roman") + font.setPointSize(12) + self.actionContour_Max_only.setFont(font) + self.actionContour_Max_only.setObjectName("actionContour_Max_only") + self.actionContour_External = QtWidgets.QAction(MainWindow) + self.actionContour_External.setCheckable(True) + font = QtGui.QFont() + font.setFamily("Times New Roman") + font.setPointSize(12) + self.actionContour_External.setFont(font) + self.actionContour_External.setObjectName("actionContour_External") + self.actionContour_All = QtWidgets.QAction(MainWindow) + self.actionContour_All.setCheckable(True) + font = QtGui.QFont() + font.setFamily("Times New Roman") + font.setPointSize(12) + self.actionContour_All.setFont(font) + self.actionContour_All.setObjectName("actionContour_All") self.menuFile.addAction(self.actionOpen_dir) self.menuFile.addAction(self.actionSave_dir) self.menuFile.addSeparator() @@ -305,9 +347,15 @@ class Ui_MainWindow(object): self.menuEdit.addAction(self.actionEdit) self.menuEdit.addAction(self.actionDelete) self.menuEdit.addAction(self.actionSave) + self.menuContour_mode.addAction(self.actionContour_Max_only) + self.menuContour_mode.addAction(self.actionContour_External) + self.menuContour_mode.addAction(self.actionContour_All) + self.menuMode.addAction(self.menuContour_mode.menuAction()) self.menubar.addAction(self.menuFile.menuAction()) self.menubar.addAction(self.menuEdit.menuAction()) self.menubar.addAction(self.menuView.menuAction()) + self.menubar.addAction(self.menuSAM_model.menuAction()) + self.menubar.addAction(self.menuMode.menuAction()) self.menubar.addAction(self.menuTools.menuAction()) self.menubar.addAction(self.menuAbout.menuAction()) self.toolBar.addAction(self.actionPrev) @@ -344,10 +392,14 @@ class Ui_MainWindow(object): self.menuLaguage.setTitle(_translate("MainWindow", "Laguage")) self.menuTools.setTitle(_translate("MainWindow", "Tools")) self.menuEdit.setTitle(_translate("MainWindow", "Edit")) + self.menuMode.setTitle(_translate("MainWindow", "Mode")) + self.menuContour_mode.setTitle(_translate("MainWindow", "Contour mode")) + self.menuSAM_model.setTitle(_translate("MainWindow", "SAM")) self.toolBar.setWindowTitle(_translate("MainWindow", "toolBar")) self.info_dock.setWindowTitle(_translate("MainWindow", "Info")) - 
self.labels_dock.setWindowTitle(_translate("MainWindow", "Labels")) + self.annos_dock.setWindowTitle(_translate("MainWindow", "Annos")) self.files_dock.setWindowTitle(_translate("MainWindow", "Files")) + self.categories_dock.setWindowTitle(_translate("MainWindow", "Categories")) self.actionOpen_dir.setText(_translate("MainWindow", "Images dir")) self.actionOpen_dir.setStatusTip(_translate("MainWindow", "Open images dir.")) self.actionZoom_in.setText(_translate("MainWindow", "Zoom in")) @@ -422,7 +474,8 @@ class Ui_MainWindow(object): self.actionPolygon.setStatusTip(_translate("MainWindow", "Accurately annotate by drawing polygon. ")) self.actionPolygon.setShortcut(_translate("MainWindow", "C")) self.actionVisible.setText(_translate("MainWindow", "Visible")) - self.actionVisible.setStatusTip(_translate("MainWindow", "Visible")) + self.actionVisible.setToolTip(_translate("MainWindow", "Visible.")) + self.actionVisible.setStatusTip(_translate("MainWindow", "Visible.")) self.actionVisible.setShortcut(_translate("MainWindow", "V")) self.actionToCOCO.setText(_translate("MainWindow", "To COCO")) self.actionToCOCO.setToolTip(_translate("MainWindow", "Convert ISAT to COCO")) @@ -433,4 +486,13 @@ class Ui_MainWindow(object): self.actionTo_LabelMe.setText(_translate("MainWindow", "To LabelMe")) self.actionTo_LabelMe.setToolTip(_translate("MainWindow", "Convert ISAT to LabelMe")) self.actionTo_LabelMe.setStatusTip(_translate("MainWindow", "Convert ISAT jsons to LabelMe jsons.")) + self.actionContour_Max_only.setText(_translate("MainWindow", "Max only")) + self.actionContour_Max_only.setStatusTip(_translate("MainWindow", "Contour save max only.")) + self.actionContour_Max_only.setWhatsThis(_translate("MainWindow", "Contour save max only.")) + self.actionContour_External.setText(_translate("MainWindow", "External")) + self.actionContour_External.setStatusTip(_translate("MainWindow", "Contour save external only.")) + self.actionContour_External.setWhatsThis(_translate("MainWindow", "Contour save external only.")) + self.actionContour_All.setText(_translate("MainWindow", "All")) + self.actionContour_All.setStatusTip(_translate("MainWindow", "Contour save all.")) + self.actionContour_All.setWhatsThis(_translate("MainWindow", "Contour save all.")) import icons_rc diff --git a/ui/MainWindow.ui b/ui/MainWindow.ui index 13529ad..481b280 100644 --- a/ui/MainWindow.ui +++ b/ui/MainWindow.ui @@ -179,9 +179,39 @@ + + + + Times New Roman + 12 + 50 + false + false + + + + Mode + + + + Contour mode + + + + + + + + + + SAM + + + + @@ -258,7 +288,7 @@ - + 85 @@ -269,7 +299,7 @@ QDockWidget::AllDockWidgetFeatures - Labels + Annos 2 @@ -285,6 +315,17 @@ + + + Categories + + + 1 + + + + + @@ -693,14 +734,17 @@ - + :/icon/icons/眼睛_eyes.svg:/icon/icons/眼睛_eyes.svg Visible + + Visible. + - Visible + Visible. V @@ -751,6 +795,66 @@ Convert ISAT jsons to LabelMe jsons. + + + true + + + Max only + + + Contour save max only. + + + Contour save max only. + + + + Times New Roman + 12 + + + + + + true + + + External + + + Contour save external only. + + + Contour save external only. + + + + Times New Roman + 12 + + + + + + true + + + All + + + Contour save all. + + + Contour save all. 
+ + + + Times New Roman + 12 + + + diff --git a/ui/label_dock.py b/ui/anno_dock.py similarity index 97% rename from ui/label_dock.py rename to ui/anno_dock.py index 54bd230..7f653f3 100644 --- a/ui/label_dock.py +++ b/ui/anno_dock.py @@ -1,7 +1,5 @@ # -*- coding: utf-8 -*- -# Form implementation generated from reading ui file 'label_dock.ui' -# # Created by: PyQt5 UI code generator 5.15.7 # # WARNING: Any manual changes made to this file will be lost when pyuic5 is diff --git a/ui/label_dock.ui b/ui/anno_dock.ui similarity index 100% rename from ui/label_dock.ui rename to ui/anno_dock.ui diff --git a/ui/category_dock.py b/ui/category_dock.py new file mode 100644 index 0000000..a645acc --- /dev/null +++ b/ui/category_dock.py @@ -0,0 +1,31 @@ +# -*- coding: utf-8 -*- + +# Form implementation generated from reading ui file '/home/super/PycharmProjects/ISAT_with_segment_anything/ui/label_dock.ui' +# +# Created by: PyQt5 UI code generator 5.15.7 +# +# WARNING: Any manual changes made to this file will be lost when pyuic5 is +# run again. Do not edit this file unless you know what you are doing. + + +from PyQt5 import QtCore, QtGui, QtWidgets + + +class Ui_Form(object): + def setupUi(self, Form): + Form.setObjectName("Form") + Form.resize(231, 462) + self.verticalLayout = QtWidgets.QVBoxLayout(Form) + self.verticalLayout.setObjectName("verticalLayout") + self.listWidget = QtWidgets.QListWidget(Form) + self.listWidget.setSelectionMode(QtWidgets.QAbstractItemView.SingleSelection) + self.listWidget.setSelectionBehavior(QtWidgets.QAbstractItemView.SelectRows) + self.listWidget.setObjectName("listWidget") + self.verticalLayout.addWidget(self.listWidget) + + self.retranslateUi(Form) + QtCore.QMetaObject.connectSlotsByName(Form) + + def retranslateUi(self, Form): + _translate = QtCore.QCoreApplication.translate + Form.setWindowTitle(_translate("Form", "Form")) diff --git a/ui/category_dock.ui b/ui/category_dock.ui new file mode 100644 index 0000000..493b535 --- /dev/null +++ b/ui/category_dock.ui @@ -0,0 +1,31 @@ + + + Form + + + + 0 + 0 + 231 + 462 + + + + Form + + + + + + QAbstractItemView::SingleSelection + + + QAbstractItemView::SelectRows + + + + + + + + diff --git a/widgets/labels_dock_widget.py b/widgets/annos_dock_widget.py similarity index 98% rename from widgets/labels_dock_widget.py rename to widgets/annos_dock_widget.py index aa1cde0..bdc85c4 100644 --- a/widgets/labels_dock_widget.py +++ b/widgets/annos_dock_widget.py @@ -2,13 +2,13 @@ # @Author : LG from PyQt5 import QtWidgets, QtCore, QtGui -from ui.label_dock import Ui_Form +from ui.anno_dock import Ui_Form import functools import re -class LabelsDockWidget(QtWidgets.QWidget, Ui_Form): +class AnnosDockWidget(QtWidgets.QWidget, Ui_Form): def __init__(self, mainwindow): - super(LabelsDockWidget, self).__init__() + super(AnnosDockWidget, self).__init__() self.setupUi(self) self.mainwindow = mainwindow self.polygon_item_dict = {} diff --git a/widgets/canvas.py b/widgets/canvas.py index 47984df..166a2a4 100644 --- a/widgets/canvas.py +++ b/widgets/canvas.py @@ -2,9 +2,8 @@ # @Author : LG from PyQt5 import QtWidgets, QtGui, QtCore -from enum import Enum from widgets.polygon import Polygon -from configs import STATUSMode, CLICKMode, DRAWMode +from configs import STATUSMode, CLICKMode, DRAWMode, CONTOURMode from PIL import Image import numpy as np import cv2 @@ -20,9 +19,11 @@ class AnnotationScene(QtWidgets.QGraphicsScene): self.mode = STATUSMode.VIEW self.click = CLICKMode.POSITIVE self.draw_mode = DRAWMode.SEGMENTANYTHING # 默认使用segment 
anything进行快速标注 + self.contour_mode = CONTOURMode.SAVE_EXTERNAL # 默认SAM只保留外轮廓 self.click_points = [] self.click_points_mode = [] self.masks:np.ndarray = None + self.mask_alpha = 0.5 self.top_layer = 1 self.guide_line_x:QtWidgets.QGraphicsLineItem = None @@ -42,7 +43,7 @@ class AnnotationScene(QtWidgets.QGraphicsScene): self.image_data = np.repeat(self.image_data, 3, axis=2) # 转换为三通道 self.mainwindow.segany.set_image(self.image_data) else: - self.mainwindow.statusbar.showMessage("Segment anything don't support the image with {} ndim.".format(self.image_data.ndim)) + self.mainwindow.statusbar.showMessage("Segment anything don't support the image with shape {} .".format(self.image_data.shape)) self.image_item = QtWidgets.QGraphicsPixmapItem() self.image_item.setZValue(0) @@ -76,7 +77,7 @@ class AnnotationScene(QtWidgets.QGraphicsScene): self.mainwindow.actionSave.setEnabled(False) self.mainwindow.set_labels_visible(False) - self.mainwindow.labels_dock_widget.setEnabled(False) + self.mainwindow.annos_dock_widget.setEnabled(False) def change_mode_to_view(self): self.mode = STATUSMode.VIEW @@ -101,7 +102,7 @@ class AnnotationScene(QtWidgets.QGraphicsScene): self.mainwindow.actionSave.setEnabled(self.mainwindow.can_be_annotated) self.mainwindow.set_labels_visible(True) - self.mainwindow.labels_dock_widget.setEnabled(True) + self.mainwindow.annos_dock_widget.setEnabled(True) def change_mode_to_edit(self): self.mode = STATUSMode.EDIT @@ -128,6 +129,15 @@ class AnnotationScene(QtWidgets.QGraphicsScene): def change_click_to_negative(self): self.click = CLICKMode.NEGATIVE + def change_contour_mode_to_save_all(self): + self.contour_mode = CONTOURMode.SAVE_ALL + + def change_contour_mode_to_save_max_only(self): + self.contour_mode = CONTOURMode.SAVE_MAX_ONLY + + def change_contour_mode_to_save_external(self): + self.contour_mode = CONTOURMode.SAVE_EXTERNAL + def start_segment_anything(self): self.draw_mode = DRAWMode.SEGMENTANYTHING self.start_draw() @@ -154,6 +164,11 @@ class AnnotationScene(QtWidgets.QGraphicsScene): self.change_mode_to_view() + category = self.mainwindow.current_category + group = self.mainwindow.current_group + is_crowd = False + note = '' + if self.draw_mode == DRAWMode.SEGMENTANYTHING: # mask to polygon # -------------- @@ -163,17 +178,54 @@ class AnnotationScene(QtWidgets.QGraphicsScene): h, w = masks.shape[-2:] masks = masks.reshape(h, w) - contours, _ = cv2.findContours(masks, cv2.RETR_EXTERNAL, cv2.CHAIN_APPROX_TC89_KCOS) + if self.contour_mode == CONTOURMode.SAVE_ALL: + # 当保留所有轮廓时,检测所有轮廓,并建立二层等级关系 + contours, hierarchy = cv2.findContours(masks, cv2.RETR_CCOMP, cv2.CHAIN_APPROX_TC89_KCOS) + else: + # 当只保留外轮廓或单个mask时,只检测外轮廓 + contours, hierarchy = cv2.findContours(masks, cv2.RETR_EXTERNAL, cv2.CHAIN_APPROX_TC89_KCOS) - # 这里取轮廓点数最多的(可能返回多个轮廓) - contour = contours[0] - for cont in contours: - if len(cont) > len(contour): - contour = cont + if self.contour_mode == CONTOURMode.SAVE_MAX_ONLY: + contour = contours[0] + for cont in contours: + if len(cont) > len(contour): + contour = cont + contours = [contour] - for point in contour: - x, y = point[0] - self.current_graph.addPoint(QtCore.QPointF(x, y)) + for index, contour in enumerate(contours): + if self.current_graph is None: + self.current_graph = Polygon() + self.addItem(self.current_graph) + + if len(contour) < 3: + continue + for point in contour: + x, y = point[0] + self.current_graph.addPoint(QtCore.QPointF(x, y)) + + if self.contour_mode == CONTOURMode.SAVE_ALL and hierarchy[0][index][3] != -1: + # 
保存所有轮廓,且当前轮廓为子轮廓,则自轮廓类别设置为背景 + category = '__background__' + group = 0 + else: + category = self.mainwindow.current_category + group = self.mainwindow.current_group + + self.current_graph.set_drawed(category, + group, + is_crowd, + note, + QtGui.QColor(self.mainwindow.category_color_dict[category]), + self.top_layer) + + # 添加新polygon + self.mainwindow.polygons.append(self.current_graph) + # 设置为最高图层 + self.current_graph.setZValue(len(self.mainwindow.polygons)) + for vertex in self.current_graph.vertexs: + vertex.setZValue(len(self.mainwindow.polygons)) + self.current_graph = None + self.mainwindow.current_group += 1 elif self.draw_mode == DRAWMode.POLYGON: if len(self.current_graph.points) < 1: @@ -198,9 +250,28 @@ class AnnotationScene(QtWidgets.QGraphicsScene): self.current_graph.addPoint(last_point) self.current_graph.addPoint(QtCore.QPointF(last_point.x(), first_point.y())) + # 设置polygon 属性 + self.current_graph.set_drawed(category, + group, + is_crowd, + note, + QtGui.QColor(self.mainwindow.category_color_dict[category]), + self.top_layer) + self.mainwindow.current_group += 1 + # 添加新polygon + self.mainwindow.polygons.append(self.current_graph) + # 设置为最高图层 + self.current_graph.setZValue(len(self.mainwindow.polygons)) + for vertex in self.current_graph.vertexs: + vertex.setZValue(len(self.mainwindow.polygons)) # 选择类别 - self.mainwindow.category_choice_widget.load_cfg() - self.mainwindow.category_choice_widget.show() + # self.mainwindow.category_choice_widget.load_cfg() + # self.mainwindow.category_choice_widget.show() + + self.mainwindow.annos_dock_widget.update_listwidget() + + self.current_graph = None + self.change_mode_to_view() # mask清空 self.click_points.clear() @@ -234,7 +305,7 @@ class AnnotationScene(QtWidgets.QGraphicsScene): for p in self.mainwindow.polygons: if p.zValue() > deleted_layer: p.setZValue(p.zValue() - 1) - self.mainwindow.labels_dock_widget.update_listwidget() + self.mainwindow.annos_dock_widget.update_listwidget() def edit_polygon(self): selectd_items = self.selectedItems() @@ -368,18 +439,21 @@ class AnnotationScene(QtWidgets.QGraphicsScene): def update_mask(self): if not self.mainwindow.use_segment_anything: return + if self.image_data is None: + return if not (self.image_data.ndim == 3 and self.image_data.shape[-1] == 3): return if len(self.click_points) > 0 and len(self.click_points_mode) > 0: - masks = self.mainwindow.segany.predict(self.click_points, self.click_points_mode) + masks = self.mainwindow.segany.predict_with_point_prompt(self.click_points, self.click_points_mode) self.masks = masks color = np.array([0, 0, 255]) h, w = masks.shape[-2:] mask_image = masks.reshape(h, w, 1) * color.reshape(1, 1, -1) mask_image = mask_image.astype("uint8") mask_image = cv2.cvtColor(mask_image, cv2.COLOR_BGR2RGB) - mask_image = cv2.addWeighted(self.image_data, 0.5, mask_image, 0.9, 0) + # 这里通过调整原始图像的权重self.mask_alpha,来调整mask的明显程度。 + mask_image = cv2.addWeighted(self.image_data, self.mask_alpha, mask_image, 1, 0) mask_image = QtGui.QImage(mask_image[:], mask_image.shape[1], mask_image.shape[0], mask_image.shape[1] * 3, QtGui.QImage.Format_RGB888) mask_pixmap = QtGui.QPixmap(mask_image) diff --git a/widgets/category_choice_dialog.py b/widgets/category_choice_dialog.py index 37412eb..30e7d1b 100644 --- a/widgets/category_choice_dialog.py +++ b/widgets/category_choice_dialog.py @@ -88,7 +88,7 @@ class CategoryChoiceDialog(QtWidgets.QDialog, Ui_Dialog): for vertex in self.scene.current_graph.vertexs: vertex.setZValue(len(self.mainwindow.polygons)) - 
self.mainwindow.labels_dock_widget.update_listwidget() + self.mainwindow.annos_dock_widget.update_listwidget() self.scene.current_graph = None self.scene.change_mode_to_view() diff --git a/widgets/category_dock_widget.py b/widgets/category_dock_widget.py new file mode 100644 index 0000000..fac3498 --- /dev/null +++ b/widgets/category_dock_widget.py @@ -0,0 +1,59 @@ +# -*- coding: utf-8 -*- +# @Author : LG + +from PyQt5 import QtWidgets, QtCore +from ui.category_dock import Ui_Form + + +class CategoriesDockWidget(QtWidgets.QWidget, Ui_Form): + def __init__(self, mainwindow): + super(CategoriesDockWidget, self).__init__() + self.setupUi(self) + self.mainwindow = mainwindow + self.listWidget.itemClicked.connect(self.item_choice) + + def update_widget(self): + self.listWidget.clear() + btngroup = QtWidgets.QButtonGroup(self) + labels = self.mainwindow.cfg.get('label', []) + for index in range(len(labels)): + label = labels[index] + name = label.get('name', 'UNKNOW') + color = label.get('color', '#000000') + item = QtWidgets.QListWidgetItem() + item.setSizeHint(QtCore.QSize(200, 30)) + widget = QtWidgets.QWidget() + + layout = QtWidgets.QHBoxLayout() + layout.setContentsMargins(9, 1, 9, 1) + + label_color = QtWidgets.QLabel() + label_color.setFixedWidth(10) + label_color.setStyleSheet("background-color: {};".format(color)) + label_color.setObjectName('label_color') + + label_radio = QtWidgets.QRadioButton('{}'.format(name)) + label_radio.setObjectName('label_radio') + label_radio.toggled.connect(self.radio_choice) + btngroup.addButton(label_radio) + if name == '__background__': + label_radio.setChecked(True) + + layout.addWidget(label_color) + layout.addWidget(label_radio) + widget.setLayout(layout) + + self.listWidget.addItem(item) + self.listWidget.setItemWidget(item, widget) + + def radio_choice(self): + if isinstance(self.sender(), QtWidgets.QRadioButton): + if self.sender().isChecked(): + self.mainwindow.current_category = self.sender().text() + + def item_choice(self, item_now): + for index in range(self.listWidget.count()): + item = self.listWidget.item(index) + widget = self.listWidget.itemWidget(item) + label_radio = widget.findChild(QtWidgets.QRadioButton, 'label_radio') + label_radio.setChecked(item==item_now) diff --git a/widgets/category_edit_dialog.py b/widgets/category_edit_dialog.py index 18aabae..a6d8ae8 100644 --- a/widgets/category_edit_dialog.py +++ b/widgets/category_edit_dialog.py @@ -99,7 +99,7 @@ class CategoryEditDialog(QtWidgets.QDialog, Ui_Dialog): # 设置polygon 属性 self.polygon.set_drawed(category, group, is_crowd, note, QtGui.QColor(self.mainwindow.category_color_dict.get(category, '#000000'))) - self.mainwindow.labels_dock_widget.update_listwidget() + self.mainwindow.annos_dock_widget.update_listwidget() self.polygon = None self.scene.change_mode_to_view() diff --git a/widgets/mainwindow.py b/widgets/mainwindow.py index d749d75..99bad76 100644 --- a/widgets/mainwindow.py +++ b/widgets/mainwindow.py @@ -6,7 +6,8 @@ from ui.MainWindow import Ui_MainWindow from widgets.setting_dialog import SettingDialog from widgets.category_choice_dialog import CategoryChoiceDialog from widgets.category_edit_dialog import CategoryEditDialog -from widgets.labels_dock_widget import LabelsDockWidget +from widgets.category_dock_widget import CategoriesDockWidget +from widgets.annos_dock_widget import AnnosDockWidget from widgets.files_dock_widget import FilesDockWidget from widgets.info_dock_widget import InfoDockWidget from widgets.right_button_menu import RightButtonMenu @@ -33,7 +34,7 
@@ class MainWindow(QtWidgets.QMainWindow, Ui_MainWindow): def __init__(self): super(MainWindow, self).__init__() self.setupUi(self) - self.init_ui() + self.image_root: str = None self.label_root:str = None @@ -41,6 +42,9 @@ class MainWindow(QtWidgets.QMainWindow, Ui_MainWindow): self.current_index = None self.current_file_index: int = None + self.current_label = '__background__' + self.current_group = 1 + self.config_file = CONFIG_FILE if os.path.exists(CONFIG_FILE) else DEFAULT_CONFIG_FILE self.saved = True self.can_be_annotated = True @@ -52,46 +56,58 @@ class MainWindow(QtWidgets.QMainWindow, Ui_MainWindow): self.map_mode = MAPMode.LABEL # 标注目标 self.current_label:Annotation = None + self.use_segment_anything = False + self.init_ui() self.reload_cfg() self.init_connect() self.reset_action() - self.init_segment_anything() - def init_segment_anything(self): - if os.path.exists('./segment_any/sam_vit_h_4b8939.pth'): - self.statusbar.showMessage('Find the checkpoint named {}.'.format('sam_vit_h_4b8939.pth')) - self.segany = SegAny('./segment_any/sam_vit_h_4b8939.pth') - self.use_segment_anything = True - elif os.path.exists('./segment_any/sam_vit_l_0b3195.pth'): - self.statusbar.showMessage('Find the checkpoint named {}.'.format('sam_vit_l_0b3195.pth')) - self.segany = SegAny('./segment_any/sam_vit_l_0b3195.pth') - self.use_segment_anything = True - elif os.path.exists('./segment_any/sam_vit_b_01ec64.pth'): - self.statusbar.showMessage('Find the checkpoint named {}.'.format('sam_vit_b_01ec64.pth')) - self.segany = SegAny('./segment_any/sam_vit_b_01ec64.pth') - self.use_segment_anything = True - else: - QtWidgets.QMessageBox.warning(self, 'Warning', 'The checkpoint of [Segment anything] not existed. If you want use quick annotate, please download from {}'.format('https://github.com/facebookresearch/segment-anything#model-checkpoints')) + def init_segment_anything(self, model_name, reload=False): + if model_name == '': self.use_segment_anything = False + for name, action in self.pths_actions.items(): + action.setChecked(model_name == name) + return + model_path = os.path.join('segment_any', model_name) + if not os.path.exists(model_path): + QtWidgets.QMessageBox.warning(self, 'Warning', + 'The checkpoint of [Segment anything] not existed. 
If you want use quick annotate, please download from {}'.format( + 'https://github.com/facebookresearch/segment-anything#model-checkpoints')) + for name, action in self.pths_actions.items(): + action.setChecked(model_name == name) + self.use_segment_anything = False + return - if self.use_segment_anything: - if self.segany.device != 'cpu': - self.gpu_resource_thread = GPUResource_Thread() - self.gpu_resource_thread.message.connect(self.labelGPUResource.setText) - self.gpu_resource_thread.start() + self.segany = SegAny(model_path) + self.use_segment_anything = True + self.statusbar.showMessage('Use the checkpoint named {}.'.format(model_name), 3000) + for name, action in self.pths_actions.items(): + action.setChecked(model_name==name) + if not reload: + if self.use_segment_anything: + if self.segany.device != 'cpu': + self.gpu_resource_thread = GPUResource_Thread() + self.gpu_resource_thread.message.connect(self.labelGPUResource.setText) + self.gpu_resource_thread.start() + else: + self.labelGPUResource.setText('cpu') else: - self.labelGPUResource.setText('cpu') - else: - self.labelGPUResource.setText('segment anything unused.') + self.labelGPUResource.setText('segment anything unused.') + + if reload and self.current_index is not None: + self.show_image(self.current_index) def init_ui(self): - # + #q self.setting_dialog = SettingDialog(parent=self, mainwindow=self) - self.labels_dock_widget = LabelsDockWidget(mainwindow=self) - self.labels_dock.setWidget(self.labels_dock_widget) + self.categories_dock_widget = CategoriesDockWidget(mainwindow=self) + self.categories_dock.setWidget(self.categories_dock_widget) + + self.annos_dock_widget = AnnosDockWidget(mainwindow=self) + self.annos_dock.setWidget(self.annos_dock_widget) self.files_dock_widget = FilesDockWidget(mainwindow=self) self.files_dock.setWidget(self.files_dock_widget) @@ -135,6 +151,29 @@ class MainWindow(QtWidgets.QMainWindow, Ui_MainWindow): self.labelData.setFixedWidth(150) self.statusbar.addPermanentWidget(self.labelData) + # + model_names = sorted([pth for pth in os.listdir('segment_any') if pth.endswith('.pth')]) + self.pths_actions = {} + for model_name in model_names: + action = QtWidgets.QAction(self) + action.setObjectName("actionZoom_in") + action.triggered.connect(functools.partial(self.init_segment_anything, model_name)) + action.setText("{}".format(model_name)) + action.setCheckable(True) + + self.pths_actions[model_name] = action + self.menuSAM_model.addAction(action) + + self.toolBar.addSeparator() + self.mask_aplha = QtWidgets.QSlider(QtCore.Qt.Orientation.Horizontal, self) + self.mask_aplha.setFixedWidth(50) + self.mask_aplha.setStatusTip('Mask alpha.') + self.mask_aplha.setToolTip('Mask alpha ') + self.mask_aplha.setMaximum(10) + self.mask_aplha.setMinimum(3) + self.mask_aplha.valueChanged.connect(self.change_mask_aplha) + self.toolBar.addWidget(self.mask_aplha) + self.trans = QtCore.QTranslator() def translate(self, language='zh'): @@ -148,7 +187,7 @@ class MainWindow(QtWidgets.QMainWindow, Ui_MainWindow): _app.installTranslator(self.trans) self.retranslateUi(self) self.info_dock_widget.retranslateUi(self.info_dock_widget) - self.labels_dock_widget.retranslateUi(self.labels_dock_widget) + self.annos_dock_widget.retranslateUi(self.annos_dock_widget) self.files_dock_widget.retranslateUi(self.files_dock_widget) self.category_choice_widget.retranslateUi(self.category_choice_widget) self.category_edit_widget.retranslateUi(self.category_edit_widget) @@ -182,8 +221,22 @@ class MainWindow(QtWidgets.QMainWindow, 
Ui_MainWindow): self.show_image(self.current_index) language = self.cfg.get('language', 'en') + self.cfg['language'] = language self.translate(language) + contour_mode = self.cfg.get('contour_mode', 'max_only') + self.cfg['contour_mode'] = contour_mode + self.change_contour_mode(contour_mode) + + mask_alpha = self.cfg.get('mask_alpha', 0.5) + self.cfg['mask_alpha'] = mask_alpha + self.mask_aplha.setValue(mask_alpha*10) + + model_name = self.cfg.get('model_name', '') + self.init_segment_anything(model_name) + + self.categories_dock_widget.update_widget() + def set_saved_state(self, is_saved:bool): self.saved = is_saved if self.files_list is not None and self.current_index is not None: @@ -255,7 +308,7 @@ class MainWindow(QtWidgets.QMainWindow, Ui_MainWindow): return try: self.polygons.clear() - self.labels_dock_widget.listWidget.clear() + self.annos_dock_widget.listWidget.clear() self.scene.cancel_draw() file_path = os.path.join(self.image_root, self.files_list[index]) image_data = Image.open(file_path) @@ -292,6 +345,7 @@ class MainWindow(QtWidgets.QMainWindow, Ui_MainWindow): # load label if self.can_be_annotated: + self.current_group = 1 _, name = os.path.split(file_path) label_path = os.path.join(self.label_root, '.'.join(name.split('.')[:-1]) + '.json') self.current_label = Annotation(file_path, label_path) @@ -299,6 +353,11 @@ class MainWindow(QtWidgets.QMainWindow, Ui_MainWindow): self.current_label.load_annotation() for object in self.current_label.objects: + try: + group = int(object.group) + self.current_group = group+1 if group >= self.current_group else self.current_group + except Exception as e: + pass polygon = Polygon() self.scene.addItem(polygon) polygon.load_object(object) @@ -309,7 +368,7 @@ class MainWindow(QtWidgets.QMainWindow, Ui_MainWindow): else: self.setWindowTitle('{}'.format(file_path)) - self.labels_dock_widget.update_listwidget() + self.annos_dock_widget.update_listwidget() self.info_dock_widget.update_widget() self.files_dock_widget.set_select(index) self.current_index = index @@ -411,8 +470,8 @@ class MainWindow(QtWidgets.QMainWindow, Ui_MainWindow): polygon.change_color(QtGui.QColor(self.category_color_dict.get(polygon.category, '#000000'))) polygon.color.setAlpha(255) polygon.setBrush(polygon.color) - self.labels_dock_widget.listWidget.setEnabled(False) - self.labels_dock_widget.checkBox_visible.setEnabled(False) + self.annos_dock_widget.listWidget.setEnabled(False) + self.annos_dock_widget.checkBox_visible.setEnabled(False) self.actionSegment_anything.setEnabled(False) self.actionPolygon.setEnabled(False) self.actionVisible.setEnabled(False) @@ -428,14 +487,16 @@ class MainWindow(QtWidgets.QMainWindow, Ui_MainWindow): for vertex in polygon.vertexs: vertex.setVisible(False) if polygon.group != '': - rgb = self.instance_cmap[int(polygon.group)] + index = int(polygon.group) + index = index % self.instance_cmap.shape[0] + rgb = self.instance_cmap[index] else: rgb = self.instance_cmap[0] polygon.change_color(QtGui.QColor(rgb[0], rgb[1], rgb[2], 255)) polygon.color.setAlpha(255) polygon.setBrush(polygon.color) - self.labels_dock_widget.listWidget.setEnabled(False) - self.labels_dock_widget.checkBox_visible.setEnabled(False) + self.annos_dock_widget.listWidget.setEnabled(False) + self.annos_dock_widget.checkBox_visible.setEnabled(False) self.actionSegment_anything.setEnabled(False) self.actionPolygon.setEnabled(False) self.actionVisible.setEnabled(False) @@ -454,8 +515,8 @@ class MainWindow(QtWidgets.QMainWindow, Ui_MainWindow): 
                 polygon.change_color(QtGui.QColor(self.category_color_dict.get(polygon.category, '#000000')))
                 polygon.color.setAlpha(polygon.nohover_alpha)
                 polygon.setBrush(polygon.color)
-            self.labels_dock_widget.listWidget.setEnabled(True)
-            self.labels_dock_widget.checkBox_visible.setEnabled(True)
+            self.annos_dock_widget.listWidget.setEnabled(True)
+            self.annos_dock_widget.checkBox_visible.setEnabled(True)
             self.actionSegment_anything.setEnabled(self.use_segment_anything)
             self.actionPolygon.setEnabled(True)
             self.actionVisible.setEnabled(True)
@@ -468,9 +529,33 @@ class MainWindow(QtWidgets.QMainWindow, Ui_MainWindow):
 
     def set_labels_visible(self, visible=None):
         if visible is None:
-            visible = not self.labels_dock_widget.checkBox_visible.isChecked()
-        self.labels_dock_widget.checkBox_visible.setChecked(visible)
-        self.labels_dock_widget.set_all_polygon_visible(visible)
+            visible = not self.annos_dock_widget.checkBox_visible.isChecked()
+        self.annos_dock_widget.checkBox_visible.setChecked(visible)
+        self.annos_dock_widget.set_all_polygon_visible(visible)
+
+    def change_contour_mode(self, contour_mode='max_only'):
+        if contour_mode == 'max_only':
+            self.scene.change_contour_mode_to_save_max_only()
+        elif contour_mode == 'external':
+            self.scene.change_contour_mode_to_save_external()
+            self.statusbar.showMessage('Saving all external contours may bring some noise.', 3000)
+        elif contour_mode == 'all':
+            self.scene.change_contour_mode_to_save_all()
+            self.statusbar.showMessage('The category of inner contours will be set to __background__.', 3000)
+        else:
+            self.scene.change_contour_mode_to_save_max_only()
+            self.statusbar.showMessage('The contour mode [{}] is not supported.'.format(contour_mode), 3000)
+
+        self.actionContour_Max_only.setChecked(contour_mode == 'max_only')
+        self.actionContour_External.setChecked(contour_mode == 'external')
+        self.actionContour_All.setChecked(contour_mode == 'all')
+        self.cfg['contour_mode'] = contour_mode
+
+    def change_mask_aplha(self):
+        value = self.mask_aplha.value() / 10
+        self.scene.mask_alpha = value
+        self.scene.update_mask()
+        self.cfg['mask_alpha'] = value
 
     def ISAT_to_VOC(self):
         self.ISAT_to_VOC_dialog.reset_gui()
@@ -529,6 +614,10 @@ class MainWindow(QtWidgets.QMainWindow, Ui_MainWindow):
 
         self.actionBit_map.triggered.connect(self.change_bit_map)
         self.actionVisible.triggered.connect(functools.partial(self.set_labels_visible, None))
+        self.actionContour_Max_only.triggered.connect(functools.partial(self.change_contour_mode, 'max_only'))
+        self.actionContour_External.triggered.connect(functools.partial(self.change_contour_mode, 'external'))
+        self.actionContour_All.triggered.connect(functools.partial(self.change_contour_mode, 'all'))
+
         self.actionToVOC.triggered.connect(self.ISAT_to_VOC)
         self.actionToCOCO.triggered.connect(self.ISAT_to_COCO)
         self.actionTo_LabelMe.triggered.connect(self.ISAT_to_LABELME)
@@ -540,7 +629,7 @@ class MainWindow(QtWidgets.QMainWindow, Ui_MainWindow):
 
         self.actionChinese.triggered.connect(self.translate_to_chinese)
         self.actionEnglish.triggered.connect(self.translate_to_english)
-        self.labels_dock_widget.listWidget.doubleClicked.connect(self.scene.edit_polygon)
+        self.annos_dock_widget.listWidget.doubleClicked.connect(self.scene.edit_polygon)
 
     def reset_action(self):
         self.actionPrev.setEnabled(False)
diff --git a/widgets/polygon.py b/widgets/polygon.py
index 2e0531c..2699b9a 100644
--- a/widgets/polygon.py
+++ b/widgets/polygon.py
@@ -133,7 +133,7 @@ class Polygon(QtWidgets.QGraphicsPolygonItem):
             else:
                 self.color.setAlpha(self.nohover_alpha)
             self.setBrush(self.color)
-            self.scene().mainwindow.labels_dock_widget.set_selected(self) # 更新label面板
+            self.scene().mainwindow.annos_dock_widget.set_selected(self) # update the annotation panel
         if change == QtWidgets.QGraphicsItem.GraphicsItemChange.ItemPositionChange: # ItemPositionHasChanged
             bias = value