<?xml version="1.0" encoding="UTF-8"?>
<rss xmlns:content="http://purl.org/rss/1.0/modules/content/" xmlns:dc="http://purl.org/dc/elements/1.1/" xmlns:rdf="http://www.w3.org/1999/02/22-rdf-syntax-ns#" xmlns:taxo="http://purl.org/rss/1.0/modules/taxonomy/" version="2.0">
  <channel>
    <title>topic Re: Chunk large amounts of data (including files) in Workato Pros Discussion Board</title>
    <link>https://systematic.workato.com/t5/workato-pros-discussion-board/chunk-large-amounts-of-data-including-files/m-p/8103#M3405</link>
    <description>&lt;P&gt;Could you provide more information? Your scenario and question are not clear.&lt;/P&gt;</description>
    <pubDate>Thu, 24 Oct 2024 15:49:56 GMT</pubDate>
    <dc:creator>gary1</dc:creator>
    <dc:date>2024-10-24T15:49:56Z</dc:date>
    <item>
      <title>Chunk large amounts of data (including files)</title>
      <link>https://systematic.workato.com/t5/workato-pros-discussion-board/chunk-large-amounts-of-data-including-files/m-p/8102#M3404</link>
      <description>&lt;P&gt;&lt;SPAN&gt;&lt;EM&gt;(Chunk large amounts of data (including files) to make it easier to perform downstream operations. For example: breaking a batch of 5000 records into chunks of 500 to prevent an API timeout.)&lt;/EM&gt;&lt;BR /&gt;I tried the above example by taking 5000 records with 5 columns and chunking them to 500 within Python code; it took 1.3 sec.&amp;nbsp;So I need a conclusion on where to chunk bulk records: within the Python code or externally, even after exceeding 90 sec, to prevent an API timeout.&lt;/SPAN&gt;&lt;/P&gt;</description>
      <pubDate>Thu, 24 Oct 2024 09:25:59 GMT</pubDate>
      <guid>https://systematic.workato.com/t5/workato-pros-discussion-board/chunk-large-amounts-of-data-including-files/m-p/8102#M3404</guid>
      <dc:creator>Patel0786</dc:creator>
      <dc:date>2024-10-24T09:25:59Z</dc:date>
    </item>
    <item>
      <title>Re: Chunk large amounts of data (including files)</title>
      <link>https://systematic.workato.com/t5/workato-pros-discussion-board/chunk-large-amounts-of-data-including-files/m-p/8103#M3405</link>
      <description>&lt;P&gt;Could you provide more information? Your scenario and question are not clear.&lt;/P&gt;</description>
      <pubDate>Thu, 24 Oct 2024 15:49:56 GMT</pubDate>
      <guid>https://systematic.workato.com/t5/workato-pros-discussion-board/chunk-large-amounts-of-data-including-files/m-p/8103#M3405</guid>
      <dc:creator>gary1</dc:creator>
      <dc:date>2024-10-24T15:49:56Z</dc:date>
    </item>
    <item>
      <title>Re: Chunk large amounts of data (including files)</title>
      <link>https://systematic.workato.com/t5/workato-pros-discussion-board/chunk-large-amounts-of-data-including-files/m-p/8115#M3412</link>
      <description>&lt;P&gt;I have more than 5000 records, which is taking more than 90 sec to execute and is getting a timeout. How do I make use of the Python code block record-splitting function in Python to send the records in batches?&lt;BR /&gt;Provided in documentation.&lt;BR /&gt;&lt;A href="https://docs.workato.com/connectors/python.html" target="_blank"&gt;https://docs.workato.com/connectors/python.html&lt;/A&gt;&lt;BR /&gt;&lt;SPAN&gt;&amp;nbsp;For example: breaking a batch of 5000 records into chunks of 500 to prevent an API timeout.&lt;/SPAN&gt;&lt;/P&gt;</description>
      <pubDate>Mon, 28 Oct 2024 04:32:34 GMT</pubDate>
      <guid>https://systematic.workato.com/t5/workato-pros-discussion-board/chunk-large-amounts-of-data-including-files/m-p/8115#M3412</guid>
      <dc:creator>Patel0786</dc:creator>
      <dc:date>2024-10-28T04:32:34Z</dc:date>
    </item>
    <item>
      <title>Re: Chunk large amounts of data (including files)</title>
      <link>https://systematic.workato.com/t5/workato-pros-discussion-board/chunk-large-amounts-of-data-including-files/m-p/8116#M3413</link>
      <description>&lt;P&gt;Hi&amp;nbsp;&lt;a href="https://systematic.workato.com/t5/user/viewprofilepage/user-id/11127"&gt;@Patel0786&lt;/a&gt;&amp;nbsp;,&lt;/P&gt;&lt;P&gt;Solution_1 (&lt;STRONG&gt;&lt;EM&gt;without using Python code_ )&lt;/EM&gt;&lt;/STRONG&gt;&amp;nbsp;Once you receive the payload request try to loop (for loop) as&amp;nbsp; batch process over the records, mention &lt;STRONG&gt;500&lt;/STRONG&gt; as your batch size and send the request to Async function call to process it. Process it repeats for the remaining records...&amp;nbsp;&lt;BR /&gt;&lt;BR /&gt;Thanks and regards,&lt;BR /&gt;Shivakumara A&lt;/P&gt;</description>
      <pubDate>Mon, 28 Oct 2024 06:13:35 GMT</pubDate>
      <guid>https://systematic.workato.com/t5/workato-pros-discussion-board/chunk-large-amounts-of-data-including-files/m-p/8116#M3413</guid>
      <dc:creator>shivakumara</dc:creator>
      <dc:date>2024-10-28T06:13:35Z</dc:date>
    </item>
  </channel>
</rss>

