# Activity · openvinotoolkit/openvino.genai

Recent activity on `openvinotoolkit/openvino.genai` (default branch `master`):

- **2024-06-28** · merge queue · **Reuse Tokenizer (#558)**
  It's now required to build the whole project to get continuous batching. Continuous batching is going to be merged into the main library over time.
- **2024-06-28** · merged to `master` via merge queue · **openvino and openvino-tokenizers use the same nightly versions (#524)**
  Co-authored-by: Ilya Lavrenov and Chen Peter.
- **2024-06-27** · merged to `master` via merge queue · **Bump version (#543)**
- **2024-06-27** · merged to `master` via merge queue (2 commits) · **Increase timeout (#559)** and **[Continuous Batching] WA x2 memory allocation (#556)**
  Timeout increased because debug Mac builds sometimes don't finish in time.
- **2024-06-26** · merged to `master` via merge queue · **[Continuous Batching] Changes in notify_handle() + simple metrics reporting (#552)**
  Changes:
  - Switch the operation order in `notify_handle()` so that the generation status is set before the last token is sent out; the user can never observe status `RUNNING` when generation is already done and no new tokens will come.
  - Add simple metrics giving basic information about the pipeline state: number of all requests, number of running requests, cache usage, etc.
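As a rough illustration of the reordering described in #552, here is a minimal, self-contained sketch. The type and function names below are hypothetical stand-ins, not the actual openvino.genai implementation; the only point is the ordering, in which the finished status is recorded before the final token is delivered, so a consumer that already sees the last token can never read status `RUNNING`.

```cpp
#include <iostream>
#include <string>
#include <vector>

// Hypothetical stand-ins for the real continuous-batching types.
enum class GenerationStatus { RUNNING, FINISHED };

struct GenerationHandle {
    GenerationStatus status = GenerationStatus::RUNNING;
    std::vector<std::string> tokens;
};

// Set the status *before* pushing the last token (the order #552 switches to),
// so the handle is never observed as RUNNING once its final token is visible.
void notify_handle(GenerationHandle& handle, const std::string& token, bool is_last) {
    if (is_last) {
        handle.status = GenerationStatus::FINISHED;
    }
    handle.tokens.push_back(token);
}

int main() {
    GenerationHandle handle;
    notify_handle(handle, "Hello", false);
    notify_handle(handle, " world", true);
    std::cout << (handle.status == GenerationStatus::FINISHED ? "FINISHED" : "RUNNING")
              << " after " << handle.tokens.size() << " tokens\n";
}
```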
- **2024-06-26** · `ilya-lavrenov` · merged a PR into `master` (4 commits, head commit: **Fix**)
- **2024-06-26** · merged to `master` via merge queue · **Integrate static shape LLM execution pipeline (#491)**

  **Overview.** Since the static shape pipeline is quite different from what is done for the CPU/GPU plugins, it is kept as a separate implementation rather than mixed into the existing one. Common code was moved into `LLMPipelineImplBase` with the intent to derive the static shape implementation from it.

  Static pipeline approach details:
  - Both the prefill and kv-cache models are reshaped to a static size at pipeline initialization (the `StaticLLMPipeline` constructor), hardcoded to `1024`. Because of this, only `1024` tokens in total (`max_tokens`) can be handled.
  - Chat conversation mode is not supported; the KV-cache is reset on every `generate` call.
  - Only `greedy` decoding is handled for now.
  - Only batch size 1 is supported.

  Examples:
  ```cpp
  ov::genai::LLMPipeline pipe(model_path, "NPU");
  ov::genai::GenerationConfig config = pipe.get_generation_config();
  pipe.generate("Why is the Sun yellow?", config);
  ```
  or with a `streamer`:
  ```cpp
  std::function<bool(std::string)> streamer = [](std::string word) { std::cout << word << std::flush; return false; };
  ...
  pipe.generate("Why is the Sun yellow?", config, streamer);
  ```
  or on raw `input_ids`:
  ```cpp
  auto tokenizer = pipe.get_tokenizer();
  auto encoded = tokenizer.encode(prompt);
  pipe.generate(encoded.input_ids, config, streamer);
  ```
  **Note:** Batched input isn't supported:
  ```cpp
  std::vector<std::string> texts = { "table is made of", "Alan Turing was a" };
  pipe.generate(texts, config, streamer); // Throws: Currently only batch size=1 is supported
  ```
  Co-authored-by: Ilya Lavrenov.
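Putting the constraints above together, a minimal end-to-end sketch might look as follows. It reuses only the calls already shown in #491 plus `max_new_tokens` from `ov::genai::GenerationConfig`; the model path and the value of 100 are illustrative assumptions, and the prompt plus generated tokens must stay within the 1024-token budget.

```cpp
#include <functional>
#include <iostream>
#include <string>
#include "openvino/genai/llm_pipeline.hpp"

int main() {
    // Illustrative model path; any exported OpenVINO LLM directory would go here.
    std::string model_path = "TinyLlama-1.1B-Chat-v1.0";

    // Static shape pipeline on NPU: greedy decoding, batch size 1 only.
    ov::genai::LLMPipeline pipe(model_path, "NPU");
    ov::genai::GenerationConfig config = pipe.get_generation_config();
    config.max_new_tokens = 100;  // keep prompt + output within the 1024-token budget

    // Print tokens as they arrive, as in the streamer example above.
    std::function<bool(std::string)> streamer = [](std::string word) {
        std::cout << word << std::flush;
        return false;  // returning false keeps generation going
    };
    pipe.generate("Why is the Sun yellow?", config, streamer);
    std::cout << std::endl;
}
```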
- **2024-06-26** · `peterchen-intel` · merged a PR into `releases/2024/2` · **Add hook sample and restore usage generation config (#553)**
  Fixes the following exception, which occurred when converting the model and running with the packages from requirements_2024.2.txt installed:
  ```
  (py310_env_2024_2) openvino-ci-69@odt-openvino-ci-69:~/wgz/openvino.genai_2024_2/openvino.genai/llm_bench/python$ python benchmark.py -m /mnt/llm_irs/release_2024.2_5c0f38f8_stateful/llama-2-7b-chat/pytorch/dldt/FP16
  [ INFO ] Traceback (most recent call last):
    File ".../llm_bench/python/benchmark.py", line 636, in main
      iter_data_list, pretrain_time = CASE_TO_BENCH[model_args['use_case']](model_path, framework, args.device, model_args, args.num_iters)
    File ".../llm_bench/python/benchmark.py", line 298, in run_text_generation_benchmark
      text_gen_fn(input_text, num, model, tokenizer, args, iter_data_list, warmup_md5, prompt_idx, bench_hook, model_precision, proc_id)
    File ".../llm_bench/python/benchmark.py", line 106, in run_text_generation
      result = model.generate(
    File ".../torch/utils/_contextlib.py", line 115, in decorate_context
      return func(*args, **kwargs)
    File ".../optimum/intel/openvino/modeling_decoder.py", line 642, in generate
      result = super().generate(
    File ".../torch/utils/_contextlib.py", line 115, in decorate_context
      return func(*args, **kwargs)
    File ".../transformers/generation/utils.py", line 1576, in generate
      result = self._greedy_search(
    File ".../llm_bench/python/utils/hook_greedy_search.py", line 281, in new_greedy_search
      raise ValueError("If `eos_token_id` is defined, make sure that `pad_token_id` is defined.")
  ValueError: If `eos_token_id` is defined, make sure that `pad_token_id` is defined.
  ```
- **2024-06-25** · merged to `master` via merge queue · **./samples/cpp/requirements.txt -> ./samples/requirements.txt (#548)**
  Closes https://github.com/openvinotoolkit/openvino.genai/issues/547.
- **2024-06-24** · merged to `master` via merge queue · **Finalize https://github.com/openvinotoolkit/openvino.genai/pull/544/ (#550)**