<?xml version="1.0" encoding="utf-8" ?>
<rdf:RDF xmlns:rdf="http://www.w3.org/1999/02/22-rdf-syntax-ns#" xmlns:dc="http://purl.org/dc/elements/1.1/" xmlns:syn="http://purl.org/rss/1.0/modules/syndication/" xmlns="http://purl.org/rss/1.0/">




    



<channel rdf:about="https://cis-india.org/internet-governance/blog/online-anonymity/search_rss">
  <title>We are anonymous, we are legion</title>
  <link>https://cis-india.org</link>
  
  <description>
    
            These are the search results for the query, showing results 446 to 460.
        
  </description>
  
  
  
  
  <image rdf:resource="https://cis-india.org/logo.png"/>

  <items>
    <rdf:Seq>
        
            <rdf:li rdf:resource="https://cis-india.org/internet-governance/blog/cis-comments-recommendations-to-digital-data-protection-bill"/>
        
        
            <rdf:li rdf:resource="https://cis-india.org/internet-governance/blog/cis-comments-and-recommendations-to-report-on-ai-governance-guidelines-development"/>
        
        
            <rdf:li rdf:resource="https://cis-india.org/internet-governance/blog/the-centre-for-internet-and-society2019s-comments-and-recommendations-to-the-indian-privacy-code-2018"/>
        
        
            <rdf:li rdf:resource="https://cis-india.org/internet-governance/blog/cis-comments-and-feedback-to-digital-personal-data-protection-rules-2025"/>
        
        
            <rdf:li rdf:resource="https://cis-india.org/internet-governance/blog/cis-joins-worldwide-campaign-to-discover-depth-of-gchq-illegal-spying"/>
        
        
            <rdf:li rdf:resource="https://cis-india.org/internet-governance/cis-joins-gni"/>
        
        
            <rdf:li rdf:resource="https://cis-india.org/internet-governance/blog/central-monitoring-system-questions-to-be-asked-in-parliament"/>
        
        
            <rdf:li rdf:resource="https://cis-india.org/internet-governance/news/hindu-businessline-april-3-2015-sibi-arasu-the-block-heads"/>
        
        
            <rdf:li rdf:resource="https://cis-india.org/internet-governance/news/factor-daily-anand-murali-august-13-2018-the-big-eye"/>
        
        
            <rdf:li rdf:resource="https://cis-india.org/internet-governance/news/bloomberg-alex-mathew-october-20-2016-the-big-debit-card-breach"/>
        
        
            <rdf:li rdf:resource="https://cis-india.org/internet-governance/news/the-benefits-and-challenges-of-the-201cfree-flow201d-of-data"/>
        
        
            <rdf:li rdf:resource="https://cis-india.org/news/sant-ox-ac-uk-may-31-2013-bapsybanoo-marchioness-winchester-lectures"/>
        
        
            <rdf:li rdf:resource="https://cis-india.org/internet-governance/awesom-contracts-project"/>
        
        
            <rdf:li rdf:resource="https://cis-india.org/internet-governance/blog/the-audacious-right-to-be-forgotten"/>
        
        
            <rdf:li rdf:resource="https://cis-india.org/internet-governance/iacs-summer-school-2012"/>
        
    </rdf:Seq>
  </items>

</channel>


    <item rdf:about="https://cis-india.org/internet-governance/blog/cis-comments-recommendations-to-digital-data-protection-bill">
    <title>The Centre for Internet and Society’s comments and recommendations to the Digital Personal Data Protection Bill, 2022</title>
    <link>https://cis-india.org/internet-governance/blog/cis-comments-recommendations-to-digital-data-protection-bill</link>
    <description>
        &lt;b&gt;The Centre for Internet &amp; Society (CIS) published its comments and recommendations to the Digital Personal Data Protection Bill, 2022, on December 17, 2022.&lt;/b&gt;
        &lt;div class="WordSection1" style="text-align: justify; "&gt;
&lt;p class="MsoNormal"&gt;&lt;span&gt; &lt;/span&gt;&lt;/p&gt;
&lt;p align="center" class="MsoNormal" style="text-align:center; "&gt;&lt;span&gt; &lt;/span&gt;&lt;/p&gt;
&lt;p class="MsoNormal"&gt;&lt;span&gt; &lt;/span&gt;&lt;/p&gt;
&lt;p class="MsoNormal"&gt;&lt;span&gt; &lt;/span&gt;&lt;/p&gt;
&lt;p align="right" class="MsoNormal" style="text-align:right; "&gt;&lt;span&gt; &lt;/span&gt;&lt;/p&gt;
&lt;p class="MsoNormal"&gt;&lt;span&gt; &lt;/span&gt;&lt;/p&gt;
&lt;p class="MsoNormal"&gt;&lt;span&gt; &lt;/span&gt;&lt;/p&gt;
&lt;p class="MsoNormal"&gt;&lt;span&gt; &lt;/span&gt;&lt;/p&gt;
&lt;p class="MsoNormal"&gt;&lt;span&gt; &lt;/span&gt;&lt;/p&gt;
&lt;p class="MsoNormal"&gt;&lt;span&gt; &lt;/span&gt;&lt;/p&gt;
&lt;p class="MsoNormal"&gt;&lt;span&gt; &lt;/span&gt;&lt;/p&gt;
&lt;p class="MsoNormal"&gt;&lt;span&gt; &lt;/span&gt;&lt;/p&gt;
&lt;h1&gt;&lt;span&gt;High Level Comments&lt;/span&gt;&lt;/h1&gt;
&lt;p class="MsoNormal"&gt;&lt;span&gt; &lt;/span&gt;&lt;/p&gt;
&lt;p class="MsoNormal"&gt;&lt;b&gt;&lt;span&gt;1.&lt;span&gt; &lt;/span&gt;&lt;/span&gt;&lt;/b&gt;&lt;b&gt;&lt;span&gt;Rationale for removing the distinction between personal data and sensitive personal data is unclear.&lt;/span&gt;&lt;/b&gt;&lt;/p&gt;
&lt;p class="MsoNormal"&gt;&lt;b&gt;&lt;span&gt; &lt;/span&gt;&lt;/b&gt;&lt;/p&gt;
&lt;p class="MsoNormal"&gt;&lt;span&gt;All the earlier iterations of the Bill as well as the rules made under Section 43A of the Information Technology Act, 2000&lt;a href="#_ftn1" name="_ftnref1"&gt;&lt;sup&gt;&lt;sup&gt;&lt;span&gt;[1]&lt;/span&gt;&lt;/sup&gt;&lt;/sup&gt;&lt;/a&gt; had classified data into two categories; (i) personal data; and (ii) sensitive personal data. The 2022 version of the Bill has removed this distinction and clubbed all personal data under one umbrella heading of personal data. The rationale for this is unclear, as sensitive personal data means such data which could reveal or be related to eminently private data such as financial data, health data, sexual orientations and biometric data. Considering the sensitive nature of the data, the data classified as sensitive personal data is accorded higher protection and safeguards from processing, therefore by clubbing all data as personal data, the higher protection such as the need for explicit consent to the processing of sensitive personal data, the bar on processing of sensitive personal data for employment purposes has also been removed. &lt;/span&gt;&lt;/p&gt;
&lt;p class="MsoNormal"&gt;&lt;span&gt; &lt;/span&gt;&lt;/p&gt;
&lt;p class="MsoNormal"&gt;&lt;b&gt;&lt;span&gt;2.&lt;span&gt; &lt;/span&gt;&lt;/span&gt;&lt;/b&gt;&lt;b&gt;&lt;span&gt;No clear roadmap for the implementation of the Bill&lt;/span&gt;&lt;/b&gt;&lt;/p&gt;
&lt;p class="MsoNormal"&gt;&lt;b&gt;&lt;span&gt; &lt;/span&gt;&lt;/b&gt;&lt;/p&gt;
&lt;p class="MsoNormal"&gt;&lt;span&gt;The 2018 Bill had specified a roadmap for the different provisions of the Bill to come into effect from the date of the Act being notified.&lt;a href="#_ftn2" name="_ftnref2"&gt;&lt;sup&gt;&lt;sup&gt;&lt;span&gt;[2]&lt;/span&gt;&lt;/sup&gt;&lt;/sup&gt;&lt;/a&gt; It specifically stated the time period within which the Authority had to be established and the subsequent rules and regulations notified. &lt;/span&gt;&lt;/p&gt;
&lt;p class="MsoNormal"&gt;&lt;span&gt; &lt;/span&gt;&lt;/p&gt;
&lt;p class="MsoNormal"&gt;&lt;span&gt;The present Bill does not specify any such blueprint; it does not provide any details on either when the Bill will be notified or the time period within which the Board shall be established and specific Rules and regulations notified. Considering that certain provisions have been deferred to Rules that have to be framed by the Central government, the absence and/or delayed notification of such rules and regulations will impact the effective functioning of the Bill. Provisions such as Section 10(1) which deals with verifiable parental consent for data of children,  Section 13 (1) which states the manner in which a Data Principal can initiate a right to correction, the process of selection and functioning of consent manager under &lt;/span&gt;&lt;span&gt;3(7)&lt;/span&gt;&lt;span&gt; are few such examples, that when the Act becomes applicable, the data principal will have to wait for the Rules to Act of these provisions, or to get clarity on entities created by the Act. &lt;/span&gt;&lt;/p&gt;
&lt;p class="MsoNormal"&gt;&lt;span&gt; &lt;/span&gt;&lt;/p&gt;
&lt;p class="MsoNormal"&gt;&lt;span&gt;The absence of any sunrise or sunset provision may disincentivise political or industrial will to support or enforce the provisions of the Bill. An example of such a lack of political will was the establishment of the Cyber Appellate Tribunal. The tribunal was established in 2006 to redress cyber fraud. However, it was virtually a defunct body from 2011 onwards when the last chairperson retired. It was eventually merged with the Telecom Dispute Settlement and Appellate Tribunal in 2017. &lt;/span&gt;&lt;/p&gt;
&lt;p class="MsoNormal"&gt;&lt;span&gt; &lt;/span&gt;&lt;/p&gt;
&lt;p class="MsoNormal"&gt;&lt;span&gt;We recommend that Bill clearly lays out a time period for the implementation of the different provisions of the Bill, especially a time frame for the establishment of the Board. This is important to give full and effective effect to the right of privacy of the individual. It is also important to ensure that individuals have an effective mechanism to enforce the right and seek recourse in case of any breach of obligations by the data fiduciaries. &lt;/span&gt;&lt;/p&gt;
&lt;p class="MsoNormal"&gt;&lt;span&gt; &lt;/span&gt;&lt;/p&gt;
&lt;p class="MsoNormal"&gt;&lt;span&gt;The Board must ensure that Data Principals and Fiduciaries have sufficient awareness of the provisions of this Bill before bringing the provisions for punishment into force. This will allow the Data Fiduciaries to align their practices with the provisions of this new legislation and the Board will also have time to define and determine certain provisions that the Bill has left the Board to define. Additionally enforcing penalties for offenses initially must be in a staggered process, combined with provisions such as warnings, in order to allow first time and mistaken offenders which now could include data principals as well, from paying a high price. This will relieve the fear of smaller companies and startups and individuals who might fear processing data for the fear of paying penalties for offenses.&lt;/span&gt;&lt;/p&gt;
&lt;p class="MsoNormal"&gt;&lt;span&gt; &lt;/span&gt;&lt;/p&gt;
&lt;h3&gt;&lt;a name="_kn12ecl3pdrp"&gt;&lt;/a&gt;&lt;span&gt;3.&lt;span&gt; &lt;/span&gt;&lt;/span&gt;&lt;span&gt;Independence of  Data Protection Board of India.&lt;/span&gt;&lt;/h3&gt;
&lt;p class="MsoNormal"&gt;&lt;span&gt;The Bill proposes the creation of the Data Protection Board of India (Board) in place of the Data Protection Authority. In comparison with the powers of the Board with the 2018 and 2019 version of Personal Data Protection Bill, we witness an abrogation of powers of the Board  to be created, in this Bill. Under Clause 19(2), the strength and composition of the Board, the process of selection, the terms and conditions of appointment and service, and the removal of its Chairperson and other Members shall be such as may be prescribed by the Union Government at a later stage. Further as per Clause 19(3), the Chief Executive of the Board will be appointed by the Union Government and the terms and conditions of her service will also be determined by the Union Government. The functions of the Board have also not been specified under the Bill, the Central Government may assign the functions to be performed by the Board.&lt;/span&gt;&lt;/p&gt;
&lt;p class="MsoNormal"&gt;&lt;span&gt; &lt;/span&gt;&lt;/p&gt;
&lt;p class="MsoNormal"&gt;&lt;span&gt;In order to govern data protection effectively, there is a need for a responsive market regulator with a strong mandate, ability to act swiftly, and resources. The political nature of  personal data also requires that the governance of data, particularly the rule-making and adjudicatory functions performed by the Board are independent of the Executive. &lt;/span&gt;&lt;/p&gt;
&lt;h1&gt;&lt;a name="_n9jzjnvile8f"&gt;&lt;/a&gt;&lt;span&gt;Chapter Wise Comments and Recommendations &lt;/span&gt;&lt;/h1&gt;
&lt;h2&gt;&lt;a name="_chp7y0vgrjqa"&gt;&lt;/a&gt;&lt;span&gt;CHAPTER I- PRELIMINARY&lt;/span&gt;&lt;/h2&gt;
&lt;p class="MsoNormal"&gt;&lt;span&gt;&lt;span&gt; &lt;/span&gt;●&lt;span&gt; &lt;/span&gt;&lt;/span&gt;&lt;b&gt;&lt;span&gt;Definition:&lt;/span&gt;&lt;/b&gt;&lt;span&gt; While the Bill has added a few new definitions to the Bill including terms such as gains, loss, consent manager etc. there are a few key definitions that have been removed from the earlier versions of the Bill. The removal of certain definitions in the Bill, eg. sensitive personal data, health data, biometric data, transgender status, creating a legal uncertainty about the application of the Bill. &lt;/span&gt;&lt;/p&gt;
&lt;p class="MsoNormal"&gt;&lt;span&gt;With respect to the existing definitions as well the definition of the term ‘harm’ has been significantly reduced to remove harms such as surveillance from the ambit of harms. In addition, with respect of the definition of the term of harms also, the 2019 version of the Bill under Clause 2 (20) the definition provides a non exhaustive list of harms, by using the phrase “harms include”, however in the new definition the phrase has been altered to “harm”, in relation to a Data Principal, means”, thereby removing the possibility of more harms that are not apparent currently from being within the purview of the Act. We recommend that the definition of harms be made into a non-exhaustive list.&lt;br /&gt; &lt;br /&gt; &lt;/span&gt;&lt;/p&gt;
&lt;h2&gt;&lt;a name="_nhwnuzprx0ir"&gt;&lt;/a&gt;&lt;span&gt;CHAPTER II - OBLIGATIONS OF DATA FIDUCIARY&lt;/span&gt;&lt;/h2&gt;
&lt;p class="MsoNormal"&gt;&lt;b&gt;&lt;span&gt;Notice: &lt;/span&gt;&lt;/b&gt;&lt;span&gt;The revised Clause on notice does away with the comprehensive requirements which were laid out under Clause 7 of the PDP Bill 2019. The current clause does not mention in detail what the notice should contain, while stating that that the notice should be itemised. While it can be reasoned that the Data Fiduciary can find the contents of the notice throughout the bill, such as with the rights of the Data Principal, the removal of a detailed list could create uncertainty for Data Fiduciaries. By leaving the finer details of what a notice should contain, it could cause Data Fiduciaries from missing out key information from the list, which in turn provide incomplete information to the Data Principal. Even in terms of Data Fiduciaries they might not know if they are complying with the provisions of the bill, and could result in them invariably being penalised. In addition to this by requiring less work by the Data Fiduciary and processor, the burden falls on the Data Principal to make sure they know how their data is processed and collected. The purpose of this legislation is to create further rights for individuals and consumers, hence the Bill should strive to put the individual at the forefront.&lt;/span&gt;&lt;/p&gt;
&lt;p class="MsoNormal"&gt;&lt;span&gt;In addition to this Clause 6(3) of the Bill states &lt;i&gt;“The Data Fiduciary shall give the Data Principal the option to access the information referred to in sub-sections (1) and (2) in English or any language specified in the Eighth Schedule to the Constitution of India.”&lt;/i&gt; While the inclusion of regional language notices is a welcome step, we suggest that the text be revised as follows &lt;i&gt;“The Data Fiduciary shall give the Data Principal the option to access the information referred to in sub-sections (1) and (2) in English&lt;b&gt; and in&lt;/b&gt; any language specified in the Eighth Schedule to the Constitution of India.” &lt;/i&gt;While the main crux of notice is to let the person know before giving consent, notice in a language that a person cannot read would not lead to meaningful consent.&lt;/span&gt;&lt;/p&gt;
&lt;p class="MsoNormal"&gt;&lt;b&gt;&lt;span&gt;Consent &lt;br /&gt; &lt;br /&gt; &lt;/span&gt;&lt;/b&gt;&lt;span&gt;Clause 3 of the Bill states &lt;i&gt;“request for consent would have the contact details of a Data Protection Officer, where applicable, or of any other person authorised by the Data Fiduciary to respond to any communication from the Data Principal for the purpose of exercise of her rights under the provisions of this Act.” &lt;/i&gt;Ideally this provision should be a part of the notice and should be mentioned in the above section. This is similar to Clause 7(1)(c) of the draft Personal Data Protetion Bill 2019 which requires the notice to state &lt;i&gt;“the identity and contact details of the data fiduciary and the contact details of the data protection officer, if applicable;”. &lt;/i&gt;&lt;/span&gt;&lt;/p&gt;
&lt;p class="MsoNormal"&gt;&lt;b&gt;&lt;span&gt;Deemed Consent&lt;/span&gt;&lt;/b&gt;&lt;/p&gt;
&lt;p class="MsoNormal"&gt;&lt;span&gt;The Bill  introduces a new type of consent that was absent in the earlier versions of the Bill. We are of the understanding that deemed consent is used to redefine non consensual processing of personal data. The use of the term deemed consent and the provisions under the section while more concise than the earlier versions could create more confusion for Data Principals and Fiduciaries alike. The definition and the examples do not shed light on one of the key issues with voluntary consent - the absence of notice. In addition to this the Bill is also silent on whether deemed consent can be withdrawn or if the data principal has the same rights as those that come from processing of data they have consented to. &lt;/span&gt;&lt;/p&gt;
&lt;p class="MsoNormal"&gt;&lt;b&gt;&lt;span&gt;Personal Data Protection of Children &lt;/span&gt;&lt;/b&gt;&lt;/p&gt;
&lt;p class="MsoNormal"&gt;&lt;b&gt;&lt;span&gt; &lt;/span&gt;&lt;/b&gt;&lt;/p&gt;
&lt;p class="MsoNormal"&gt;&lt;span&gt;The age to determine whether a person has the ability to legally consent in the online world has been intertwined with the age of consent under the Indian Contract Act; i.e. 18 years. The Bill makes no distinction between a 5 year old and a 17 year old- both are treated in the same manner. It assumes the same level of maturity for all persons under the age of 18. It is pertinent to note that the law in the offline world does recognise that distinction and also acknowledges the changes in the level of maturity. As per Section 82 of the Indian Penal Code read with Section 83, any act by a child under the age of 12 shall not be considered as an offence. While the maturity of those aged between 12–18 years will be decided by court (individuals between the age of 16–18 years can also be tried as adults for heinous crimes). Similarly, child labour laws in the country allow children above the age of 14 years to work in non-hazardous industry&lt;/span&gt;&lt;/p&gt;
&lt;p class="MsoNormal"&gt;&lt;span&gt; &lt;/span&gt;&lt;/p&gt;
&lt;p class="MsoNormal"&gt;&lt;span&gt;There is  a need to evaluate and rethink the idea that children are passive consumers of the internet and hence the consent of the parent is enough. Additionally, the bracketing of all individuals under the age of 18 as children fails to look at how teenages and young people use the internet. This is more important looking at the 2019 data which suggests that two-thirds of India’s internet users are in the 12–29 years age group, with those in the 12–19 age group accounting for about 21.5% of the total internet usage in metro cities. Given that the pandemic has compelled students and schools to adopt and adapt to virtual schools, the reliance on the internet has become ubiquitous with education. Out of an estimated 504 million internet users, nearly one-third are aged under 19. As per the Annual Status on Education Report (ASER) 2020, more than one-third of all schoolchildren are pursuing digital education, either through online classes or recorded videos.&lt;/span&gt;&lt;/p&gt;
&lt;p class="MsoNormal"&gt;&lt;span&gt; &lt;/span&gt;&lt;/p&gt;
&lt;p class="MsoNormal"&gt;&lt;span&gt;Instead of setting a blanket age for determining valid consent, we could look at alternative means to determine the appropriate age for children at different levels of maturity, similar to what had been developed by the U.K. Information Commissioner’s Office. The Age Appropriate Code prescribes 15 standards that online services need to follow. It broadly applies to online services "provided for remuneration"—including those supported by online advertising—that process the personal data of and are "likely to be accessed" by children under 18 years of age, even if those services are not targeted at children. This includes apps, search engines, social media platforms, online games and marketplaces, news or educational websites, content streaming services, online messaging services. &lt;/span&gt;&lt;/p&gt;
&lt;p class="MsoNormal"&gt;&lt;span&gt; &lt;/span&gt;&lt;/p&gt;
&lt;p class="MsoNormal"&gt;&lt;span&gt;The reservation to definition of child under the Bill has also been expressed by some members of the JPC through their dissenting opinion. MP Ritesh Pandey stated that keeping in mind the best interest of the child the Bill should consider a child to be a person who is less than 14 years of age. This would ensure that young people could benefit from the advances in technology without parental consent and reduce the social barriers that young women face in accessing the internet. Similarly Manish Tiwari in his dissenting note also observed that the regulation of the processing of data of children should be based on the type of content or data. The JPC Report observed that the Bill does not require the data fiduciary to take fresh consent of the child, once the child has attained the age of majority, and it also does not give the child the option to withdraw their consent upon reaching the majority age. It therefore, made the following recommendations:&lt;/span&gt;&lt;/p&gt;
&lt;p class="MsoNormal"&gt;&lt;span&gt; &lt;/span&gt;&lt;/p&gt;
&lt;p class="MsoNormal"&gt;&lt;span&gt;Registration of data fiduciaries, exclusively dealing with children’s data. Application of the Majority Act to a contract with a child. Obligation of Data fiduciary to inform a child to provide their consent, three months before such child attains majority  Continuation of the services until the child opts out or gives a fresh consent, upon achieving majority. However, these recommendations have not been incorporated into the provisions of the Bill. In addition to this the Bill is silent on the status of non consensual processing and deemed consent with respect to the data of children.&lt;/span&gt;&lt;/p&gt;
&lt;p class="MsoNormal"&gt;&lt;span&gt; &lt;/span&gt;&lt;/p&gt;
&lt;p class="MsoNormal"&gt;&lt;span&gt;We recommend that fiduciaries who have services targeted at children should be considered as significant Data Fiduciaries. In addition to this the Bill should also state that the guardians could approach the Data Protection Board on behalf of the child. With these obligations in place, the age of mandatory consent could be reduced and the data fiduciary could have an added responsibility of informing the children in the simplest manner how their data will be used. Such an approach places a responsibility on Data Fiduciaires when implementing services that will be used by children and allows the children to be aware of data processing, when they are interacting with technology.&lt;/span&gt;&lt;/p&gt;
&lt;p class="MsoNormal"&gt;&lt;span&gt; &lt;/span&gt;&lt;/p&gt;
&lt;p class="MsoNormal"&gt;&lt;b&gt;&lt;span&gt;Chapter III-RIGHTS AND DUTIES OF DATA PRINCIPAL&lt;/span&gt;&lt;/b&gt;&lt;/p&gt;
&lt;p class="MsoNormal"&gt;&lt;b&gt;&lt;span&gt; &lt;/span&gt;&lt;/b&gt;&lt;/p&gt;
&lt;p class="MsoNormal"&gt;&lt;b&gt;&lt;span&gt;Rights of Data Principal&lt;/span&gt;&lt;/b&gt;&lt;/p&gt;
&lt;p class="MsoNormal"&gt;&lt;span&gt;Clause 12(3) of the Bill while providing the Data Principal the right to be informed of the identities of all the Data Fiduciaries with whom the personal data has been shared, also states that the data principal has the right to be informed of the categories of personal data shared. However the current version of the Bill provides only one category of data that is personal data. &lt;/span&gt;&lt;/p&gt;
&lt;p class="MsoNormal"&gt;&lt;span&gt;Clause 14 of the Bill talks about the Right of Grievance Redressal, and  states that the Data Principal has the right to readily available means of registering a grievance, however the Bill does not provide in the Notice provisions the need to mention details of a grievance officer or a grievance redressal mechanism. It is only  the additional obligations on significant data fiduciary that mentions the need for a Data Protection officer to be the contact for the grievance redressal mechanism under the provisions of this Bill. The Bill could ideally re-use the provisions of the IT Act SPDI Rules 2011 in which Section 5(7) states &lt;i&gt;“Body corporate shall address any discrepancies and grievances of their provider of the information with respect to processing of information in a time bound manner. For this purpose, the body corporate shall designate a Grievance Officer and publish his name and contact details on its website. The Grievance Officer shall redress the grievances or provider of information expeditiously but within one month ' from the date of receipt of grievance.”&lt;br /&gt; &lt;/i&gt;&lt;br /&gt; The above framing would not only bring clarity to the data fiduciaries on what process to follow for a grievance redressal, it also would reduce the significant burden of theBoard. &lt;/span&gt;&lt;/p&gt;
&lt;p class="MsoNormal"&gt;&lt;b&gt;&lt;span&gt;Duties of Data Principals&lt;/span&gt;&lt;/b&gt;&lt;/p&gt;
&lt;p class="MsoNormal"&gt;&lt;span&gt;The Bill while entisting duties of the Data Principal states that the “Data Principal shall not register a false or frivolous grievance or complaint with a Data Fiduciary or the Board”, however it is very difficult for a Data Principal to and even for the Board to determine what constitutes a “frivolous grievance”. In addition to this the absence of a defined notice provision and the inclusion of deemed consent would mean that the Data Fiduciary could have more information about the matter than the Data Principal. This could mean that the fiduciary could prove that a claim was false or frivolous. Clause 21(12) states that “&lt;i&gt;At any stage after receipt of a complaint, if the Board determines that the complaint is devoid of merit, it may issue a warning or impose costs on the complainant.” &lt;/i&gt;In addition to this Clause 25(1) states that “ &lt;i&gt;If the Board determines on conclusion of an inquiry that non- compliance by &lt;b&gt;a person &lt;/b&gt;is significant, it may, after giving the person a reasonable opportunity of being heard, impose such financial penalty as specified in Schedule 1, not exceeding rupees five hundred crore in each instance.” &lt;/i&gt;The use of the term “person” in this case includes data which could mean that they could be penalised under the provisions of the Bill, which could also include not complying with the duties.&lt;/span&gt;&lt;/p&gt;
&lt;p class="MsoNormal"&gt;&lt;span&gt; &lt;/span&gt;&lt;/p&gt;
&lt;p class="MsoNormal"&gt;&lt;b&gt;&lt;span&gt;CHAPTER IV- SPECIAL PROVISIONS&lt;/span&gt;&lt;/b&gt;&lt;/p&gt;
&lt;p class="MsoNormal"&gt;&lt;b&gt;&lt;span&gt;Transfer of Personal Data outside India&lt;/span&gt;&lt;/b&gt;&lt;/p&gt;
&lt;p class="MsoNormal"&gt;&lt;span&gt;Clause 17 of the Bill has removed the requirement of data localisation which the 2018 and 2019 Bill required. Personal data can be transferred to countries that will be notified by the central government. There is no need for a copy of the data to be stored locally and no prohibition on transferring sensitive personal data and critical data. Though it is a welcome change that personal data can be transferred outside of India, we would highlight the concerns in permitting unrestricted access to and transfer of all types of data. Certain data such as defence and health data do require sectoral regulation and ringfencing of the transfer of data. &lt;/span&gt;&lt;/p&gt;
&lt;p class="MsoNormal"&gt;&lt;b&gt;&lt;span&gt;Exemptions&lt;/span&gt;&lt;/b&gt;&lt;/p&gt;
&lt;p class="MsoNormal"&gt;&lt;span&gt;Clause 18 of the Bill has widened the scope of government exemptions. Blanket exemption has been given to the State under Clause 18(4) from deleting the personal data even when the purpose for which the data was collected is no longer served or when retention is no longer necessary. The requirement of &lt;i&gt;proportionality, reasonableness and fairness&lt;/i&gt; have been removed for the Central Government to exempt any department or instrumentality from the ambit of the Bill.&lt;/span&gt;&lt;span&gt; &lt;/span&gt;&lt;span&gt;By doing away with the four pronged test, this provision is not in consonance with test laid down by the Supreme Court and are also incompatible with an effective privacy regulation. There is also no provision for either a prior judicial review  of the order by a district judge as envisaged by the Justice Srikrishna Committee Report or post facto review by an oversight committee of the order as laid down under the Indian Telegraph Rules, 1951&lt;a href="#_ftn3" name="_ftnref3"&gt;&lt;sup&gt;&lt;sup&gt;&lt;span&gt;[3]&lt;/span&gt;&lt;/sup&gt;&lt;/sup&gt;&lt;/a&gt; and the rules framed under Information Technology Act&lt;a href="#_ftn4" name="_ftnref4"&gt;&lt;sup&gt;&lt;sup&gt;&lt;span&gt;[4]&lt;/span&gt;&lt;/sup&gt;&lt;/sup&gt;&lt;/a&gt;. The provision states that such processing of personal data shall be subject to the procedure, safeguard and oversight mechanisms that may be prescribed.&lt;/span&gt;&lt;/p&gt;
&lt;p class="MsoNormal"&gt;&lt;b&gt;&lt;span&gt; &lt;/span&gt;&lt;/b&gt;&lt;/p&gt;
&lt;p class="MsoNormal"&gt;&lt;b&gt;&lt;span&gt; &lt;/span&gt;&lt;/b&gt;&lt;/p&gt;
&lt;p class="MsoNormal"&gt;&lt;b&gt;&lt;span&gt; &lt;/span&gt;&lt;/b&gt;&lt;/p&gt;
&lt;p class="MsoNormal"&gt;&lt;b&gt;&lt;span&gt; &lt;/span&gt;&lt;/b&gt;&lt;/p&gt;
&lt;p class="MsoNormal"&gt;&lt;b&gt;&lt;span&gt; &lt;/span&gt;&lt;/b&gt;&lt;/p&gt;
&lt;p class="MsoNormal"&gt;&lt;b&gt;&lt;span&gt; &lt;/span&gt;&lt;/b&gt;&lt;/p&gt;
&lt;p class="MsoNormal"&gt;&lt;b&gt;&lt;span&gt; &lt;/span&gt;&lt;/b&gt;&lt;/p&gt;
&lt;p class="MsoNormal"&gt;&lt;b&gt;&lt;span&gt; &lt;/span&gt;&lt;/b&gt;&lt;/p&gt;
&lt;p class="MsoNormal"&gt;&lt;b&gt;&lt;span&gt; &lt;/span&gt;&lt;/b&gt;&lt;/p&gt;
&lt;p class="MsoNormal"&gt;&lt;b&gt;&lt;span&gt; &lt;/span&gt;&lt;/b&gt;&lt;/p&gt;
&lt;p class="MsoNormal"&gt;&lt;b&gt;&lt;span&gt; &lt;/span&gt;&lt;/b&gt;&lt;/p&gt;
&lt;p class="MsoNormal"&gt;&lt;b&gt;&lt;span&gt; &lt;/span&gt;&lt;/b&gt;&lt;/p&gt;
&lt;p class="MsoNormal"&gt;&lt;b&gt;&lt;span&gt; &lt;/span&gt;&lt;/b&gt;&lt;/p&gt;
&lt;p class="MsoNormal"&gt;&lt;span&gt; &lt;/span&gt;&lt;/p&gt;
&lt;/div&gt;
&lt;div style="text-align: justify; "&gt;&lt;br clear="all" /&gt; 
&lt;hr align="left" size="1" width="100%" /&gt;
&lt;div id="ftn1"&gt;
&lt;p class="MsoNormal"&gt;&lt;a href="#_ftnref1" name="_ftn1"&gt;&lt;sup&gt;&lt;span&gt;&lt;sup&gt;&lt;span&gt;[1]&lt;/span&gt;&lt;/sup&gt;&lt;/span&gt;&lt;/sup&gt;&lt;/a&gt;&lt;span&gt; Information Technology (Reasonable security practices and procedures and sensitive personal data or information) Rules, 2011&lt;/span&gt;&lt;span&gt;.&lt;/span&gt;&lt;/p&gt;
&lt;/div&gt;
&lt;div id="ftn2"&gt;
&lt;p class="MsoNormal"&gt;&lt;a href="#_ftnref2" name="_ftn2"&gt;&lt;sup&gt;&lt;span&gt;&lt;sup&gt;&lt;span&gt;[2]&lt;/span&gt;&lt;/sup&gt;&lt;/span&gt;&lt;/sup&gt;&lt;/a&gt;&lt;span&gt; Clause 97 of the 2018 Bill states&lt;i&gt;“(1) For the purposes of this Chapter, the term ‘notified date’ refers to the date notified by the Central Government under sub-section (3) of section 1. (2)The notified date shall be any date within twelve months from the date of enactment of this Act. (3)The following provisions shall come into force on the notified date-(a) Chapter X; (b) Section 107; and (c) Section 108. (4)The Central Government shall, no later than three months from the notified date establish the Authority. (5)The Authority shall, no later than twelve months from the notified date notify the grounds of processing of personal data in respect of the activities listed in sub-section (2) of section 17. (6) The Authority shall no, later than twelve months from the date notified date issue codes of practice  on the following matters-(a) notice under section 8; (b) data quality under section 9; (c) storage limitation under section 10; (d) processing of personal data under Chapter III; (e) processing of sensitive personal data under Chapter IV; (f) security safeguards under section 31; (g) research purposes under section 45;(h) exercise of data principal rights under Chapter VI; (i) methods of de-identification and anonymisation; (j) transparency and accountability measures under Chapter VII. (7)Section 40 shall come into force on such date as is notified by the Central Government for the purpose of that section.(8)The remaining provision of the Act shall come into force eighteen months from the notified date.”&lt;/i&gt;&lt;/span&gt;&lt;/p&gt;
&lt;/div&gt;
&lt;div id="ftn3"&gt;
&lt;p class="MsoNormal"&gt;&lt;a href="#_ftnref3" name="_ftn3"&gt;&lt;sup&gt;&lt;span&gt;&lt;sup&gt;&lt;span&gt;[3]&lt;/span&gt;&lt;/sup&gt;&lt;/span&gt;&lt;/sup&gt;&lt;/a&gt;&lt;span&gt; &lt;/span&gt;&lt;span&gt;Rule 419A (16): The Central Government or the State Government shall constitute a Review Committee. &lt;/span&gt;&lt;/p&gt;
&lt;p class="MsoNormal"&gt;&lt;span&gt;Rule 419 A(17): The Review Committee shall meet at least once in two months and record its findings whether the directions issued under sub-rule (1) are in accordance with the provisions of sub-section (2) of Section 5 of the said Act. When the Review Committee is of the opinion that the directions are not in accordance with the provisions referred to above it may set aside the directions and orders for destruction of the copies of the intercepted message or class of messages.&lt;/span&gt;&lt;/p&gt;
&lt;p class="MsoNormal"&gt;&lt;span&gt; &lt;/span&gt;&lt;/p&gt;
&lt;/div&gt;
&lt;div id="ftn4"&gt;
&lt;p class="MsoNormal"&gt;&lt;a href="#_ftnref4" name="_ftn4"&gt;&lt;sup&gt;&lt;span&gt;&lt;sup&gt;&lt;span&gt;[4]&lt;/span&gt;&lt;/sup&gt;&lt;/span&gt;&lt;/sup&gt;&lt;/a&gt;&lt;span&gt; &lt;/span&gt;&lt;span&gt;Rule 22 of Information Technology (Procedure and Safeguards for Interception, Monitoring and Decryption of Information) Rules, 2009: The Review Committee shall meet at least once in two months and record its findings whether the directions issued under rule 3 are in accordance with the provisions of sub-section (2) of section 69 of the Act and where the Review Committee is of the opinion that the directions are not in accordance with the provisions referred to above, it may set aside the directions and issue an order for destruction of the copies, including corresponding electronic record of the intercepted or monitored or decrypted information.&lt;/span&gt;&lt;/p&gt;
&lt;p class="MsoNormal"&gt;&lt;span&gt; &lt;/span&gt;&lt;/p&gt;
&lt;/div&gt;
&lt;/div&gt;
        &lt;p&gt;
        For more details visit &lt;a href='https://cis-india.org/internet-governance/blog/cis-comments-recommendations-to-digital-data-protection-bill'&gt;https://cis-india.org/internet-governance/blog/cis-comments-recommendations-to-digital-data-protection-bill&lt;/a&gt;
        &lt;/p&gt;
    </description>
    <dc:publisher>No publisher</dc:publisher>
    <dc:creator>Shweta Mohandas and Pallavi Bedi</dc:creator>
    <dc:rights></dc:rights>

    
        <dc:subject>Internet Governance</dc:subject>
    
    
        <dc:subject>Digital Governance</dc:subject>
    
    
        <dc:subject>Data Protection</dc:subject>
    
    
        <dc:subject>Privacy</dc:subject>
    

   <dc:date>2023-01-20T02:35:30Z</dc:date>
   <dc:type>Blog Entry</dc:type>
   </item>


    <item rdf:about="https://cis-india.org/internet-governance/blog/cis-comments-and-recommendations-to-report-on-ai-governance-guidelines-development">
    <title>The Centre for Internet and Society’s comments and recommendations to the: Report on AI Governance Guidelines Development</title>
    <link>https://cis-india.org/internet-governance/blog/cis-comments-and-recommendations-to-report-on-ai-governance-guidelines-development</link>
    <description>
        &lt;b&gt;The Centre for Internet &amp; Society (CIS) submitted its comments and recommendations on the Report on AI Governance Guidelines Development.&lt;/b&gt;
        
&lt;p&gt;With research assistance by Anuj Singh&lt;/p&gt;
&lt;hr /&gt;
&lt;h2&gt;I. Background&lt;/h2&gt;
&lt;p&gt;On 6 January 2025, a Subcommittee on ‘AI Governance and Guidelines Development’ under the Advisory Group put out the Report on AI Governance Guidelines Development, which advocated for a whole-of-government approach to AI governance. This sub-committee was constituted by the Ministry of Electronics and Information Technology (MeitY) on November 9, 2023, to analyse gaps and offer recommendations for developing a comprehensive framework for governance of Artificial Intelligence (AI). As various AI governance conversations take centre stage, this is a welcome step, and we hope that there are more opportunities through public comments and consultations to improve on this important AI document. &lt;br /&gt;&lt;br /&gt;CIS’ comments are in line with the submission guidelines;&amp;nbsp; we have provided both comments and suggestions based on the headings and text provided in the report.&lt;/p&gt;
&lt;h2&gt;II. Governance of AI&lt;/h2&gt;
&lt;p&gt;The subcommittee report has explained its reasons for staying away from a definition. However, it would be helpful to set the scope of AI, at the outset of the report, given that different AI systems have different roles and functionalities. Having a clearer framework in the beginning can help readers better understand the scope of the conversation in the report. This section also states that AI can now &lt;strong&gt;“&lt;/strong&gt;perform complex tasks without active human control or&amp;nbsp; supervision”, while there are instances where AI is being used without an active human control, there is a need to emphasise on the need for humans in the loop. This has also been highlighted in the &lt;a href="https://oecd.ai/en/dashboards/ai-principles/P6"&gt;OECD AI principles &lt;/a&gt;which this report draws inspiration from.&lt;/p&gt;
&lt;h3&gt;A. AI Governance Principles&lt;/h3&gt;
&lt;p&gt;&lt;strong&gt;A proposed list of AI Governance principles (with their explanations) is given&amp;nbsp; below. &lt;/strong&gt;&lt;br /&gt;While referring to the OECD AI principles is a good first step in understanding the global best practices, it is suggested that an exercise in&amp;nbsp; mapping of all global AI principles documents published by international and multinational organisations and civil society is undertaken,&amp;nbsp; to determine principles that are most important for India. The OECD AI principles also come from regions that have a better internet penetration, and higher literacy rate than India, hence for them the principle of “Digital by design governance” could be achieved but in India, a digital first approach, especially in governance, could lead to large scale exclusions.&lt;/p&gt;
&lt;h3&gt;B. Considerations to operationalise the principles&lt;/h3&gt;
&lt;p&gt;&lt;strong&gt;1. Examining AI systems using a lifecycle approach &lt;/strong&gt;&lt;/p&gt;
&lt;p&gt;The sub committee has taken a novel approach to define the AI life cycle. The terms “Development, Deployment and Diffusion” have not been seen in any of the major publications about AI lifecycle. While academicians (e.g. &lt;a href="https://www.sciencedirect.com/org/science/article/pii/S1438887123002224"&gt;Chen et al. (2023&lt;/a&gt;), &lt;a href="https://www.cell.com/patterns/pdfExtended/S2666-3899(22)00074-5"&gt;De Silva and Alahakoon (2022)&lt;/a&gt;) have pointed out that the AI life cycle contains the following stages - design, development and deployment, others &lt;a href="https://www.sciencedirect.com/science/article/pii/S2666389922000745"&gt;(Ng et al. (2022)&lt;/a&gt; have defined it as “data creation, data acquisition, model development, model evaluation and model deployment. Even NASSCOM’s&amp;nbsp; &lt;a href="https://nasscom.in/ai/pdf/the-developer%27s-playbook-for-responsible-ai-in-india.pdf"&gt;Responsible AI Playbook&lt;/a&gt; follows the “conception, designing, development and deployment, as some of the key stages in the AI life cycle. Similarly the OECD also recognised “i) ‘design, data and models’ ii) ‘verification and validation’; iii) ‘deployment’; and iv) ‘operation and monitoring’.” as the phases of the AI life cycle. The subcommittee hence could provide citation as well as a justification of using this novel approach to the AI lifecycle, and state the reason for moving away from the recognised stages. Steering away from an understood approach could cause some confusion amongst different stakeholders who may not be as well versed with AI terminologies and the AI lifecycle to begin with.&lt;/p&gt;
&lt;p&gt;&lt;strong&gt;2. Taking an ecosystem-view of AI actors &lt;/strong&gt;&lt;/p&gt;
&lt;p&gt;While the report rightly states that multiple actors are involved across the AI lifecycle, it is also important to note that the same actor could also be involved in multiple stages of the AI lifecycle. For example if we take the case of an AI app used for disease diagnosis. The medical professional can be the data principal (using their own data), the data provider (using the app thereby providing the data), and the end user (someone who is using the app for diagnosis). Similarly if we look at the example of a government body,&amp;nbsp; it can be the data provider, the developer (if it is made inhouse or outsourced through tenders), the deployer, as well as the end user. Hence for each AI application there might be multiple actors who play different roles and whose roles might not be static. &lt;br /&gt;&lt;br /&gt;While looking at governance approaches, the approach must ideally not be limited to responsibilities and liabilities, especially when the “data principal” and individual end users are highlighted as actors; the approach should also include rights and means of redressal in order to be a rights based people centric approach to AI governance.&lt;/p&gt;
&lt;p&gt;&lt;strong&gt;3. Leveraging technology for governance &lt;/strong&gt;&lt;/p&gt;
&lt;p&gt;While the use of techno-legal approach in governance is picking up speed there is a need to look at existing Central and State capacity to undertake this, and also look at what are the ways this could affect people who still do not have access to the internet. One example of a techno legal approach that has seen some success has been the&lt;a href="https://www.techinasia.com/indian-state-running-pilot-put-land-records-blockchain"&gt; Bhumi programme&lt;/a&gt; in Andhra Pradesh that used blockchain for land records,&amp;nbsp; however this also led to the weakening of local institutions, and also led to exclusion of marginalised people &lt;a href="https://www.tandfonline.com/doi/full/10.1080/01436597.2021.2013116"&gt;Kshetri (2021)&lt;/a&gt;. It was also stated that there was a need to strengthen existing institutions before using a technological measure. &lt;br /&gt; &lt;strong&gt;&lt;br /&gt; &lt;/strong&gt;Secondly, while the sub committee has emphasized on the improvements in quality of generative AI tools, there is a need to assess how these tools work for Indian use cases. It was reported last year that ChatGPT could not answer all the questions relating to the Indian civil services exam, and failed to correctly answer questions on geography, however it was able to crack &lt;a href="https://indiaai.gov.in/news/chatgpt-fails-to-clear-the-prestigious-civil-service-examination"&gt;tough exams in the USA.&lt;/a&gt; In addition to this, a month ago the Finance Ministry has advised government officials to refrain from using generative AI tools on official devices for fear of leakage of &lt;a href="https://www.thehindu.com/sci-tech/technology/indias-finance-ministry-asks-employees-to-avoid-ai-tools-like-chatgpt-deepseek/article69183180.ece"&gt;confidential information.&lt;/a&gt; &lt;strong&gt; &lt;/strong&gt;&lt;/p&gt;
&lt;p&gt;&lt;strong&gt; &lt;/strong&gt;&lt;/p&gt;
&lt;p&gt;Thirdly, the subcommittee needs to assess India’s data preparedness for this scale of techno legal approach. In our study which was specific to healthcare and AI in India, where we surveyed medical professionals, hospitals and technology companies, a common understanding was that data quality in Indian datasets was an issue, and that there was some reliance on data from the global north. This could be similar in other sectors as well, hence when this data is used to train the system it could lead to harms and biases.&lt;/p&gt;
&lt;h2&gt;III. GAP ANALYSIS&lt;/h2&gt;
&lt;h3&gt;A. The need to enable effective compliance and enforcement of existing laws.&lt;/h3&gt;
&lt;p&gt;The sub-committee has highlighted the importance of ensuring that the growth of AI does not lead to unfair trade practices and market dominance. It is hence important to analyse whether the existing laws on antitrust and competition, and the regulatory capacity of Competition Commission of India&amp;nbsp; are robust enough to deal with AI, and the change in technology and technology developers.&lt;/p&gt;
&lt;p&gt;There is also an urgent need to assess the issues that might come under the ambit of competition throughout the lifecycle of AI, including in areas of chip manufacturing, compute, data, models and IP. While the players could keep changing in this evolving area of technology there is a need to strengthen the existing regulatory system, before looking at techno legal measures.&lt;/p&gt;
&lt;p&gt;We suggest that before a techno legal approach is sought in all forms of governance, there is an urgent need to map the existing regulations both central and state and assess how they apply to regulating AI, and assess the capacity of existing regulatory bodies to regulate issues of AI. In the case of healthcare for example there are multiple laws, policies and guidelines, as well as regulatory bodies that apply to various stages of healthcare and various actors and at times these regulations do not refer to each other or cause duplications that could lead to &lt;a href="https://www.kas.de/documents/d/politikdialog-asien/panorama_2024-1-107-122"&gt;lack of clarity.&lt;/a&gt;&lt;/p&gt;
&lt;p&gt;Below we are adding our comments and suggestions on certain subsections in this section on &lt;strong&gt;The need to enable effective compliance and enforcement of existing laws &lt;/strong&gt;&lt;/p&gt;
&lt;h3&gt;1. Intellectual property rights&lt;/h3&gt;
&lt;p&gt;&lt;strong&gt;a. Training models on copyrighted data and liability in case of&amp;nbsp; infringement&lt;/strong&gt;&lt;/p&gt;
&lt;p&gt;While Section 14 of the Indian Copyright Act, 1957 provides copyright holders with exclusive rights to copy and store works, considering the fact that training AI models involves making &lt;a href="https://spicyip.com/2019/08/should-indian-copyright-law-prevent-text-and-data-mining.html"&gt;non-expressive uses of work&lt;/a&gt;, a straightforward conclusion may not be drawn easily. Hence, the presumption that training models on copyrighted data constitutes infringement is premature and unfounded.&lt;/p&gt;
&lt;p&gt;&lt;em&gt;This report states “The Indian law permits a very closed list of activities in using copyrighted data&amp;nbsp; without permission that do not constitute an infringement. Accordingly, it is clear&amp;nbsp; that the scope of the exception under Section 52(1)(a)(i) of the Copyright Act,&amp;nbsp; 1957 is extremely narrow. Commercial research is not exempted; not-for-profit &lt;sup&gt;10&lt;/sup&gt; institutional research is not exempted. Not-for-profit research for personal or private use, not with the intention of gaining profit and which does not compete&amp;nbsp; with the existing copyrighted work is exempted. “ &lt;/em&gt;&lt;/p&gt;
&lt;p&gt;Indian copyright law follows a ‘hybrid’ model of limitations and exceptions under s.52(1). S. 52(1)(a), which is the ‘fair dealing’ provision, is more open-ended than the rest of the clauses in the section. Specifically, the Indian fair dealing provision permits fair dealing with any work (not being a computer programme) for the purposes of private or personal use, including research. &lt;br /&gt; &lt;br /&gt; If India is keen on indigenous AI development, specifically as it relates to foundation models, it should work towards developing frameworks for suitable exceptions ,as may be appropriate.&amp;nbsp; Lawmakers could distinguish between the different types of copyrighted works and public-interest purposes while considering the issue of infringement and liability&lt;/p&gt;
&lt;p&gt;&lt;strong&gt;b. Copyrightability of work generated by using foundation models &lt;/strong&gt;&lt;/p&gt;
&lt;p&gt;We suggest that a public consultation would certainly be a useful exercise in ensuring opinions and issues of all stakeholders including copyright holders, authors, and users are taken into account.&lt;/p&gt;
&lt;h3&gt;C. The need for a whole-of-government approach.&lt;/h3&gt;
&lt;p&gt;While the information existing in silos is a significant issue and roadblock, if the many guidelines and existing principles have taught us anything, it is that without specificity and direct applicability it is difficult for implementers to extrapolate principles into their development, deployment and governance mechanisms.&amp;nbsp; The committee assumes a sectoral understanding from the government on various players in highly regulated sectors such as healthcare or financial services. However, as our recent study on &lt;a href="https://cis-india.org/internet-governance/blog/ai-for-healthcare-understanding-data-supply-chain-and-auditability-in-india"&gt;AI in healthcare&lt;/a&gt; indicates, there are significant information gaps when it comes to shared understanding of what data is being used for AI development, where the AI models are being developed and what kind of partnerships are being entered into, for development and deployment of AI systems. While the report also highlights the concerns about the siloed regulatory framework, it is also important to consider how the sector specific challenges lend themselves to the cross-sectoral discussion. Consider that an AI credit scoring system in financial services is leading to exclusion errors.&lt;/p&gt;
&lt;p&gt;Additionally, consider an AI system being deployed for disease diagnosis. While both use predictive AI, the nature of risk and harm are different. While there can be common and broad frameworks to potentially test efficacy of both AI models, the exact parameters for testing them would have to be unique. Therefore, it will be important to consider where bringing together cross-sectoral stakeholders will be useful and where it may need more deep work at the sector level.&lt;/p&gt;
&lt;h2&gt;IV. Recommendations&lt;/h2&gt;
&lt;h3&gt;1. To implement a whole-of-government approach to AI Governance, MeitY and the Principal Scientific Adviser should establish an empowered mechanism to coordinate AI Governance.&lt;/h3&gt;
&lt;p&gt;We would like to reiterate the earlier section and highlight the&amp;nbsp; importance of considering how the sector specific challenges lend themselves to the cross-sectoral discussion. While the whole of government approach is good as it will help building a common understanding between different government institutions, this approach might not be sufficient when it comes to AI governance. This is because it is based on the implicit assumption that internal coordination among various government bodies is enough to manage AI related risks.&lt;/p&gt;
&lt;h3&gt;2.To develop a systems-level understanding of India’s AI&amp;nbsp; ecosystem, MeitY should establish, and administratively house,&amp;nbsp; a Technical Secretariat to serve as a technical advisory body&amp;nbsp; and coordination focal point for the Committee/ Group.&lt;/h3&gt;
&lt;p&gt;&lt;strong&gt; &lt;/strong&gt;&lt;/p&gt;
&lt;p&gt;The Subcommittee report states at this stage, it is not recommended to establish a Committee/ Group or its Secretariat as statutory authorities, as making such a decision requires significant analysis of gaps, requirements, and possible unintended outcomes. While these are valid considerations, it is necessary that there are adequate checks and balances in place. If the secretariat is placed within MeitY then safeguards must be in place to ensure that officials have autonomy in decision making.&amp;nbsp; The subcommittee suggests that MeitY can bring officials on deputation from other departments. Similarly the committee proposes bringing experts from the industry, while it is important for informed policy making,&amp;nbsp; there is also risk of &lt;a href="https://papers.ssrn.com/sol3/papers.cfm?abstract_id=4931927"&gt;regulatory capture&lt;/a&gt;. Setting a cap on the percentage of industry representatives and full disclosure of affiliations of experts involved are some of the safeguards which can be considered. We also suggest that members of civil society are also considered for this Secretariat.&lt;/p&gt;
&lt;h3&gt;3.To build evidence on actual risks and to inform harm mitigation,&amp;nbsp; the Technical Secretariat should establish, house, and operate&amp;nbsp; an AI incident database as a repository of problems&amp;nbsp; experienced in the real world that should guide responses to&amp;nbsp; mitigate or avoid repeated bad outcomes.&lt;/h3&gt;
&lt;p&gt;&lt;strong&gt;&lt;em&gt; &lt;/em&gt;&lt;/strong&gt;&lt;/p&gt;
&lt;p&gt;The report suggests that the technical secretariat will develop a database of actual incidents of AI-related risks in India. In most instances, an AI incident database will assume that an AI related unfavorable incident has already taken place, which then implies that it's no longer a potential risk but an actual harm. This recommendation takes a post-facto approach to assessing AI systems, as opposed to conducting risk assessments prior to the actual deployment of an AI system. Further, it also lays emphasis on receiving reports from public sector organizations deploying AI systems. Given that public sector organizations, in many cases, would be the deployers of AI systems as opposed to the developers, they may have limited know-how on functionality of tools and therefore the risks and harms.&lt;/p&gt;
&lt;p&gt;It is important to clarify and define what will be considered as an AI risk as this could also depend on stakeholders, for example losing clients due to an AI system for a company is a risk, and so is an individual&amp;nbsp; being denied health insurance because of AI bias.&amp;nbsp; With this understanding, while there is a need to keep an active assessment of risks and the emergence of new risks, the Technical&amp;nbsp; Secretariat could also undergo a mapping of the existing risks which have been highlighted by academia and civil society and international organisations and begin the risk database with that. In addition, the “AI incident database” should also be open to research institutions and civil society organisations similar to &lt;a href="https://oecd.ai/en/incidents"&gt;The OECD AI Incidents Monitor&lt;/a&gt;.&lt;/p&gt;
&lt;h3&gt;4. To enhance transparency and governance across the AI&amp;nbsp; ecosystem, the Technical Secretariat should engage the&amp;nbsp; industry to drive voluntary commitments on transparency&amp;nbsp; across the overall AI ecosystem and on baseline commitments&amp;nbsp; for high capability/widely deployed systems.&lt;/h3&gt;
&lt;p&gt;It is commendable that the sub committee in this report extends the transparency requirement to the government, with the example of law enforcement. This would create more trust in the systems and also add the responsibility on the companies providing these services to be compliant with existing laws and regulations.&lt;/p&gt;
&lt;p&gt;While the transparency measures listed will ensure better understanding of processes of&amp;nbsp; AI developers and deployers, there is also a need to bring in responsibility along with transparency. While this report also mentions ‘peer review by third parties’, we would also like to suggest auditing as a mechanism to undertake transparency and responsibility. In our study on &lt;a href="https://cis-india.org/internet-governance/blog/ai-for-healthcare-understanding-data-supply-chain-and-auditability-in-india-pdf"&gt;AI data supply chain &amp;amp; auditability and healthcare in India&lt;/a&gt; (which surveyed 150 medical professionals, 175 respondents from healthcare institutions and 175 respondents from technology companies), we found that 77 percent of healthcare institutions and 64 percent of the technology companies surveyed for this study, conducted audits or evaluations of the privacy and security measures for data.&lt;/p&gt;
&lt;p&gt;&lt;img src="https://cis-india.org/home-images/AIGovernanceComments.png" alt="null" class="image-inline" title="AI Governance Comments" /&gt;&lt;/p&gt;
&lt;div class="visualClear"&gt;Source: CIS survey of professionals in AI and healthcare, January- April 2024. Medical professionals (n = 133); healthcare institutions (n = 162); technology companies (n = 171)&lt;/div&gt;
&lt;div class="visualClear"&gt;&amp;nbsp;&lt;/div&gt;
&lt;h3&gt;5. Form a sub-group to work with MEITY to suggest specific measures that may be considered under the proposed legislation like Digital India Act (DIA) to strengthen and harmonise the legal framework, regulatory and technical capacity and the adjudicatory set-up for the digital industries to ensure effective grievance redressal and ease of doing business.&lt;/h3&gt;
&lt;p&gt;It would be necessary to provide some clarity on where the process for the Digital India Act currently stands. While there were public consultations in 2023, we have not heard about the progress in the development of the Act. The most recent discussion on the Act was in January 2025, where S Krishnan, Secretary, Ministry of Electronics and IT (MeitY), &lt;a href="https://www.financialexpress.com/life/technology-will-not-rush-in-bringing-digital-india-act-meity-secretary-3708673/"&gt;stated&lt;/a&gt; that they were in no hurry to carry forward the draft Digital India Act and regulatory framework around AI. He also stated that the existing legal frameworks were currently sufficient to handle AI intermediaries. &lt;br /&gt; &lt;br /&gt; We would also like to highlight that during the consultations on the DIA it was proposed to replace the &lt;a href="https://vidhilegalpolicy.in/blog/explained-the-digital-india-act-2023/"&gt;Information Technology Act 2000. &lt;/a&gt;It is necessary that the subcommittee give clarity on this, since if the DIA is enacted, this report's Section III on GAP analysis especially around the IT Act, and Cyber Security will need to be revisited.&lt;/p&gt;
&lt;h2&gt;&lt;/h2&gt;

        &lt;p&gt;
        For more details visit &lt;a href='https://cis-india.org/internet-governance/blog/cis-comments-and-recommendations-to-report-on-ai-governance-guidelines-development'&gt;https://cis-india.org/internet-governance/blog/cis-comments-and-recommendations-to-report-on-ai-governance-guidelines-development&lt;/a&gt;
        &lt;/p&gt;
    </description>
    <dc:publisher>No publisher</dc:publisher>
    <dc:creator>Shweta Mohandas, Amrita Sengupta and Anubha Sinha</dc:creator>
    <dc:rights></dc:rights>

    
        <dc:subject>Internet Governance</dc:subject>
    
    
        <dc:subject>Artificial Intelligence</dc:subject>
    

   <dc:date>2025-03-06T06:32:45Z</dc:date>
   <dc:type>Blog Entry</dc:type>
   </item>


    <item rdf:about="https://cis-india.org/internet-governance/blog/the-centre-for-internet-and-society2019s-comments-and-recommendations-to-the-indian-privacy-code-2018">
    <title>The Centre for Internet and Society’s Comments and Recommendations to the: Indian Privacy Code, 2018 </title>
    <link>https://cis-india.org/internet-governance/blog/the-centre-for-internet-and-society2019s-comments-and-recommendations-to-the-indian-privacy-code-2018</link>
    <description>
        &lt;b&gt;The debate surrounding privacy has in recent times gained momentum due to the Aadhaar judgement and the growing concerns around the use of personal data by corporations and governments.&lt;/b&gt;
        &lt;p&gt;Click to download the &lt;a class="external-link" href="http://cis-india.org/internet-governance/files/indian-privacy-code"&gt;file here&lt;/a&gt;&lt;/p&gt;
&lt;hr /&gt;
&lt;p style="text-align: justify; "&gt;As India moves towards greater digitization, and technology becomes even more pervasive, there is a need to ensure the privacy of the individual as well as hold the private and public sector accountable for the use of personal data. Towards enabling public discourse and furthering the development a privacy framework for India, a group of lawyers and policy analysts backed by the Internet Freedom Foundation (IFF) have put together a draft a citizen's bill encompassing a citizen centric privacy code that is based on seven guiding principles.&lt;a href="#_ftn1"&gt;&lt;sup&gt;&lt;sup&gt;[1]&lt;/sup&gt;&lt;/sup&gt;&lt;/a&gt; This draft builds on the Citizens Privacy Bill, 2013 that had been drafted by CIS on the basis of a series of roundtables conducted in India.&lt;a href="#_ftn2"&gt;&lt;sup&gt;&lt;sup&gt;[2]&lt;/sup&gt;&lt;/sup&gt;&lt;/a&gt; Privacy is one of the key areas of research at CIS and we welcome this initiative and hope that our comments make the Act a stronger embodiment of the right to privacy.&lt;/p&gt;
&lt;h1 style="text-align: justify; "&gt;Section by Section Recommendations&lt;/h1&gt;
&lt;h2 style="text-align: justify; "&gt;Preamble&lt;/h2&gt;
&lt;p style="text-align: justify; "&gt;&lt;b&gt;Comment:&lt;/b&gt; The Preamble specifies that the need for privacy has increased in the digital age, with the emergence of big data analytics.&lt;/p&gt;
&lt;p style="text-align: justify; "&gt;&lt;b&gt;Recommendation:&lt;/b&gt; It could instead be worded as ‘with the emergence of technologies such as big data analytics’, so as to recognize the impact of multiple technologies and processes including big data analytics.&lt;/p&gt;
&lt;p style="text-align: justify; "&gt;&lt;b&gt;Comment:&lt;/b&gt; The Preamble states that it is necessary for good governance that all interceptions of communication and surveillance be conducted in a systematic and transparent manner subservient to the rule of law.&lt;/p&gt;
&lt;p style="text-align: justify; "&gt;Recommendation: The word ‘systematic’ is out of place, and can be interpreted incorrectly. It could instead be replaced with words such as ‘necessary’, ‘proportionate’, ‘specific’, and ‘narrow’, which would be more appropriate in this context.&lt;/p&gt;
&lt;h2 style="text-align: justify; "&gt;Chapter 1&lt;/h2&gt;
&lt;h2 style="text-align: justify; "&gt;Preliminary&lt;/h2&gt;
&lt;p style="text-align: justify; "&gt;&lt;b&gt;Section 2: &lt;/b&gt;This Section defines the terms used in the Act.&lt;/p&gt;
&lt;p style="text-align: justify; "&gt;&lt;b&gt;Comment:&lt;/b&gt; Some of the terms are incomplete and a few of the terms used in the Act have not been included in the list of definitions.&lt;/p&gt;
&lt;p style="text-align: justify; "&gt;&lt;b&gt;Recommendations:&lt;/b&gt;&lt;/p&gt;
&lt;ul style="text-align: justify; "&gt;
&lt;li&gt;The term “effective consent” needs to be defined. The term is first used in the Proviso to Section 7(2), which states “Provided that effective consent can only be said to have been obtained where...:”It is crucial that the Act defines effective consent especially when it is with respect to sensitive data.&lt;/li&gt;
&lt;li&gt;The term “open data” needs to be defined. The term is first used in Section 5 that states the exemptions to the right to privacy. Subsection 1 clause ii states as follows “the collection, storage, processing or dissemination by a natural person of personal data for a strictly non-commercial purposes which may be classified as open data by the Privacy Commission”. Hence the term open data needs to be defined in order to ensure that there is no ambiguity in terms of what open data means.&lt;/li&gt;
&lt;li&gt;The Act does not define “erasure”, although the term erasure does come under the definition of destroy (Section 2(1)(p)). There are some provisions that use the word erasure, hence if erasure and destruction mean different acts then the term erasure needs to be defined, otherwise in order to maintain uniformity the sections where erasure is used could be substituted with the term “destroy” as defined under this Act.&lt;/li&gt;
&lt;li&gt;The definition of “sensitive personal data” does not include location data and identification numbers. The definition of sensitive data must include location data as the Act also deals in depth with surveillance. With respect to identification numbers, the Act needs to consider identification numbers (eg. the Aadhaar number, PAN number etc.) as sensitive information as this number is linked to a person's identity and can reveal sensitive personal data such as name, age, location, biometrics etc. Example can be taken from Section 4(1) of the GDPR&lt;a href="#_ftn3"&gt;&lt;sup&gt;&lt;sup&gt;[3]&lt;/sup&gt;&lt;/sup&gt;&lt;/a&gt; which identifies location data as well as identification numbers as sensitive personal data along with other identifiers such as biometric data, gender, race, etc.&lt;/li&gt;
&lt;li&gt;The Act defines consent as the “unambiguous indication of a data subject’s agreement” however, the definition does not indicate that there needs to be an informed consent. Hence the revised definition could read as follows “the informed and unambiguous indication of a data subject’s agreement”. It is also unclear how this definition of consent relates to ‘effective consent’. This relationship needs to be clarified.&lt;/li&gt;
&lt;li&gt;The Act defines ‘data controller’ in Section 2(1)(l) as “ any person including appropriate government..”. In order to remove any ambiguity over the definition of the term person, the definition could specify that the term person means any natural or legal person.&lt;/li&gt;
&lt;li&gt;The Act defines ‘data processor’ in Section 2(1)(m) as “means any person including appropriate government”. In order to remove any ambiguity over the definition of the term ‘any person’, the definition could specify that the term person means any natural or legal person. &lt;/li&gt;
&lt;/ul&gt;
&lt;h2 style="text-align: justify; "&gt;CHAPTER II&lt;/h2&gt;
&lt;h2 style="text-align: justify; "&gt;Right to Privacy&lt;/h2&gt;
&lt;p style="text-align: justify; "&gt;&lt;b&gt;Section 5: &lt;/b&gt;This section provides exemption to the rights to privacy&lt;b&gt;. &lt;/b&gt;&lt;/p&gt;
&lt;p style="text-align: justify; "&gt;&lt;b&gt;Comment: &lt;/b&gt;Section 5(1)(ii) states that the collection, storage, processing or dissemination by a natural person of personal data for a strictly non-commercial purposes are exempted from the provisions of the right to privacy. This clause also states that this data may be classified as open data by the Privacy Commission. This section hence provides individuals the immunity from collection, storage, processing and dissemination of data of another person. However this provision fails to state what specific activities qualify as non commercial use.&lt;/p&gt;
&lt;p style="text-align: justify; "&gt;&lt;b&gt;Recommendation: &lt;/b&gt;This provision could potentially be strengthened by specifying that the use must be in the public interest. The other issue with this subsection is that it fails to define open data. If open data was to be examined using its common definition i.e “data that can be freely used, modified, and shared by anyone for any purpose”&lt;a href="#_ftn4"&gt;&lt;sup&gt;&lt;sup&gt;[4]&lt;/sup&gt;&lt;/sup&gt;&lt;/a&gt; then this section becomes highly problematic. As a simple interpretation would mean that any personal data that is collected, stored, processed or disseminated by a natural person can possibly become available to anyone. Beyond this, India has an existing framework governing open data. Ideally the privacy commissioner could work closely with government departments to ensure that open data practices in India are in compliance with the privacy law.&lt;/p&gt;
&lt;h2 style="text-align: justify; "&gt;CHAPTER III&lt;/h2&gt;
&lt;h2 style="text-align: justify; "&gt;Protection of Personal Data&lt;/h2&gt;
&lt;h2 style="text-align: justify; "&gt;PART A&lt;/h2&gt;
&lt;p style="text-align: justify; "&gt;&lt;b&gt;Notice by data controller &lt;/b&gt;&lt;/p&gt;
&lt;p style="text-align: justify; "&gt;&lt;b&gt;Section 6: &lt;/b&gt;This section specifies the obligations to be followed by data controllers in their communication, to maintain transparency and lays down provisions that all communications by Data Controllers need to be complied with.&lt;/p&gt;
&lt;p style="text-align: justify; "&gt;&lt;b&gt;Comment:&lt;/b&gt; There seems to be a error in the &lt;i&gt;Proviso &lt;/i&gt;to this section. The proviso states “Provided that all communications by the Data Controllers including but not limited to the rights of Data Subjects under this part &lt;b&gt;shall may be &lt;/b&gt;refused when the Data Controller is, unable to identify or has a well founded basis for reasonable doubts as to the identity of the Data Subject or are manifestly unfounded, excessive and repetitive, with respect to the information sought by the Data Subject ”.&lt;/p&gt;
&lt;p style="text-align: justify; "&gt;&lt;b&gt;Recommendation: &lt;/b&gt;The proviso could read as follows “The proviso states “Provided that all communications by the Data Controllers including but not limited to the rights of Data Subjects under this part &lt;b&gt;&lt;i&gt;may&lt;/i&gt;&lt;/b&gt; be refused when the Data Controller is…”. We suggest the use of the ‘may’ as this makes the provision less limiting to the rights of the data controller.&lt;/p&gt;
&lt;p style="text-align: justify; "&gt;Additionally, it is not completely clear what ‘included but not limited to...’ would entail. This could be clarified further.&lt;/p&gt;
&lt;h2 style="text-align: justify; "&gt;PART B&lt;/h2&gt;
&lt;h2 style="text-align: justify; "&gt;CONSENT OF DATA SUBJECTS&lt;/h2&gt;
&lt;p style="text-align: justify; "&gt;&lt;b&gt;Section 10: &lt;/b&gt;This section talks about the collection of personal data.&lt;/p&gt;
&lt;p style="text-align: justify; "&gt;&lt;b&gt;Comment:&lt;/b&gt; Section 10(3) lays down the information that a person must provide before collecting the personal data of an individual.&lt;/p&gt;
&lt;p style="text-align: justify; "&gt;&lt;b&gt;Comment:&lt;/b&gt; Section 10(3)(xi) states as follows “the time and manner in which it will be destroyed, or the criteria used to Personal data collected in pursuance of a grant of consent by the data subject to whom it pertains shall, if that consent is subsequently withdrawn for any reason, be destroyed forthwith: determine that time period;”. There seems to be a problem with the sentence construction and the rather complex sentence is difficult to understand.&lt;/p&gt;
&lt;p style="text-align: justify; "&gt;&lt;b&gt;Recommendation:&lt;/b&gt; This section could be reworked in such as way that two conditions are clear, one - the time and manner in which the data will be destroyed and two the status of the data once consent is withdrawn.&lt;/p&gt;
&lt;p style="text-align: justify; "&gt;&lt;b&gt;Comment:&lt;/b&gt; Section 10(3)(xiii) states that the identity and contact details of the data controller and data processor must be provided. However it fails to state that the data controller should provide more details with regard to the process for grievance redressal. It does not provide guidance on what type of information needs to go into this notice and the process of redressal. This could lead to very broad disclosures about the existence of redress mechanisms without providing individuals an effective avenue to pursue.&lt;/p&gt;
&lt;p style="text-align: justify; "&gt;&lt;b&gt;Recommendation: &lt;/b&gt;As part of the requirement for providing the procedure for redress, data controllers could specifically be required to provide the details of the Privacy Officers, privacy commissioner, as well as provide more information on the redressal mechanisms and the process necessary to follow.&lt;/p&gt;
&lt;p style="text-align: justify; "&gt;&lt;b&gt;Section 11:&lt;/b&gt;This section lays out the provisions where collection of personal data without prior consent is possible.&lt;/p&gt;
&lt;p style="text-align: justify; "&gt;&lt;b&gt;Comment:&lt;/b&gt; Section 11 states “Personal data may be collected or received from a third party by a Data Controller the prior consent of the data subject only if it is:..”. However as the title of the section suggests the sentence could indicate the situations where it is permissible to collect personal data without prior consent from the data subject”. Hence the word “without” is missing from the sentence. Additionally the sentence could state that the personal data may be collected or received directly from an individual or from a third party as it is possible to directly collect personal data from an individual without consent.&lt;/p&gt;
&lt;p style="text-align: justify; "&gt;&lt;b&gt;Recommendation:&lt;/b&gt;The sentence could read as “Personal data may be collected or received from an &lt;b&gt;individual or a third party &lt;/b&gt;by a Data Controller &lt;b&gt;&lt;i&gt;without&lt;/i&gt;&lt;/b&gt; the prior consent of the data subject only if it is:..”.&lt;/p&gt;
&lt;p style="text-align: justify; "&gt;&lt;b&gt;Comment:&lt;/b&gt; Section 11(1)(i) states that the collection of personal data without prior consent when it is “necessary for the provision of an emergency medical service or essential services”. However it does not specify the kind or severity of the medical emergency.&lt;/p&gt;
&lt;p style="text-align: justify; "&gt;&lt;b&gt;Recommendation: &lt;/b&gt;In addition to medical emergency another exception could be made for imminent threats to life.&lt;/p&gt;
&lt;p style="text-align: justify; "&gt;&lt;b&gt;Section 12: &lt;/b&gt;This section details the Special provisions in respect of data collected prior to the commencement of this Act.&lt;/p&gt;
&lt;p style="text-align: justify; "&gt;&lt;b&gt;Comment:&lt;/b&gt; This section states that all data collected, processed and stored by data controllers and data processors prior to the date on which this Act comes into force shall be destroyed within a period of two years from the date on which this Act comes into force. Unless consent is obtained afresh within two years or that the personal data has been anonymised in such a manner to make re-identification of the data subject absolutely impossible. However this process can be highly difficult and impractical in terms of it being time consuming, expensive particularly, in cases of analog collections of data. This is especially problematic in cases where the controller cannot seek consent of the data subject due to change in address or inavailability or death. This will also be problematic in cases of digitized government records.&lt;/p&gt;
&lt;p style="text-align: justify; "&gt;&lt;b&gt;Recommendation:&lt;/b&gt; We suggest three ways in which the issue of data collected prior to the Act can be handled. One way is to make a distinction on the data based on whether the data controller has specified the purpose of the collection before collecting the data. If the purpose was not defined then the data can be deleted or anonymised. Hence there is no need to collect the data afresh for all the cases. The purpose of the data can also be intimated to the data subject at a later stage and the data subject can choose if they would like the controller to store or process the data.The second way is by seeking consent afresh only for the sensitive data. Lastly, the data controller could be permitted to retain records of data, but must necessarily obtain fresh consent before using them. By not having a blanket provision of retrospective data deletion the Act can address situations where deletion is complicated or might have a potential negative impact by allowing storage, deletion, or anonymisation of data based on its purpose and kind.&lt;/p&gt;
&lt;p style="text-align: justify; "&gt;&lt;b&gt;Comment:&lt;/b&gt; Section (2)(1)(i) of the Act states that the data will not be destroyed provided that &lt;b&gt;effective consent&lt;/b&gt; is obtained afresh within two years. However as stated earlier the Act does not define effective consent.&lt;/p&gt;
&lt;p style="text-align: justify; "&gt;Recommendation: The term &lt;b&gt;effective consent &lt;/b&gt;needs to be defined in order to bring clarity to this provision.&lt;/p&gt;
&lt;h2 style="text-align: justify; "&gt;PART C&lt;/h2&gt;
&lt;h2 style="text-align: justify; "&gt;FURTHER LIMITATIONS ON DATA CONTROLLERS&lt;/h2&gt;
&lt;p style="text-align: justify; "&gt;&lt;b&gt;Section 16: &lt;/b&gt;This section deals with the security of personal data and duty of confidentiality.&lt;/p&gt;
&lt;p style="text-align: justify; "&gt;&lt;b&gt;Comment:&lt;/b&gt; Section 16(2) states “ Any person who collects, receives, stores, processes or otherwise handles any personal data shall be subject to a duty of confidentiality and secrecy in respect of it.” Similarly Section 16(3) states “data controllers and data processors shall be subject to a duty of confidentiality and secrecy in respect of personal data in their possession or control. However apart from the duty of confidentiality and secrecy the data collectors and processors could also have a duty to maintain the security of the data.” Though it is important for confidentiality and secrecy to be maintained, ensuring security requires adequate and effective technical controls to be in place.&lt;/p&gt;
&lt;p style="text-align: justify; "&gt;&lt;b&gt;Recommendation:&lt;/b&gt; This section could also emphasise on the duty of the data controllers to ensure the security of the data. The breach notification could include details about data that is impacted by a breach or attach as well as the technical details of the infrastructure compromised.&lt;/p&gt;
&lt;p style="text-align: justify; "&gt;&lt;b&gt;Section 17:&lt;/b&gt; This section details the conditions for the transfer of personal data outside the territory of India.&lt;/p&gt;
&lt;p style="text-align: justify; "&gt;&lt;b&gt;Comment:&lt;/b&gt; Section 17 allows a transfer of personal data outside the territory of India in 3 situations- If the Central Government issues a notification deciding that the country/international organization in question can ensure an adequate level of protection, compatible with privacy principles contained in this Act; if the transfer is pursuant to an agreement which binds the recipient of the data to similar or stronger conditions in relation to handling the data; or if there are appropriate legal instruments and safeguards in place, to the satisfaction of the data controller. However, there is no clarification for what would constitute ‘adequate’ or ‘appropriate’ protection, and it does not account for situations in which the Government has not yet notified a country/organisation as ensuring adequate protection. In comparison, the GDPR, in Chapter V&lt;a href="#_ftn5"&gt;&lt;sup&gt;&lt;sup&gt;[5]&lt;/sup&gt;&lt;/sup&gt;&lt;/a&gt;, contains factors that must be considered when determining adequacy of protection, including relevant legislation and data protection rules, the existence of independent supervisory authorities, and international commitments or obligations of the country/organization. Additionally, the GDPR allows data transfer even in the absence of the determination of such protection in certain instances, including the use of standard data protection clauses, that have been adopted or approved by the Commission; legally binding instruments between public authorities; approved code of conduct, etc. Additionally, it allows derogations from these measures in certain situations: when the data subject expressly agrees, despite being informed of the risks; or if the transfer is necessary for conclusion of contract between data subject and controller, or controller and third party in the interest of data subject; or if the transfer is necessary for reasons of public interest, etc. 
No such circumstances are accounted for in Section 17.&lt;/p&gt;
&lt;p style="text-align: justify; "&gt;&lt;b&gt;Recommendation: &lt;/b&gt;Additionally, data controllers and processors could be provided with a period to allow them to align their policies towards the new legislation. Making these provisions operational as soon as the Act is commenced might put the controllers or processors guilty of involuntary breaching the provisions of the Act.&lt;/p&gt;
&lt;p style="text-align: justify; "&gt;&lt;b&gt;Section 19: &lt;/b&gt;This section&lt;b&gt; &lt;/b&gt;states the special provisions for sensitive personal data.&lt;/p&gt;
&lt;p style="text-align: justify; "&gt;&lt;b&gt;Comment:&lt;/b&gt; Section 19(2) states that in addition to the requirements set out under sub-clause (1), the Privacy Commission shall set out additional protections in respect of:i.sensitive personal data relating to data subjects who are minors; ii.biometric and deoxyribonucleic acid data; and iii.financial and credit data.This however creates additional categories of sensitive data apart from the ones that have already been created.&lt;a href="#_ftn6"&gt;&lt;sup&gt;&lt;sup&gt;[6]&lt;/sup&gt;&lt;/sup&gt;&lt;/a&gt; These additional categories can result in confusion and errors.&lt;/p&gt;
&lt;p style="text-align: justify; "&gt;&lt;b&gt;Recommendation: &lt;/b&gt;Sensitive data must not be further categorised as this can lead to confusion and errors. Hence all sensitive data could be subject to the same level of protection.&lt;/p&gt;
&lt;p style="text-align: justify; "&gt;&lt;b&gt;Section 20:&lt;/b&gt; This section states the special provisions for data impact assessment.&lt;/p&gt;
&lt;p style="text-align: justify; "&gt;&lt;b&gt;Comment:&lt;/b&gt; This section states that all data impact assessment reports will be submitted periodically to the State Privacy commission. This section does not make provisions for instances of circumstances in which such records may be made public. Additionally the data impact assessment could also include a human rights impact assessment.&lt;/p&gt;
&lt;p style="text-align: justify; "&gt;&lt;b&gt;Recommendation:&lt;/b&gt; The section could also have provisions for making the records of the impact assessment or relevant parts of the assessment public. This will ensure that the data controllers / processors are subjected to a standard of accountability and transparency. Additionally as privacy is linked to human rights the data impact assessment could also include a human rights impact assessment. The Act could further clarify the process for submission to State Privacy Commissions and potential access by the Central Privacy Commission to provide clarity in process.&lt;/p&gt;
&lt;p style="text-align: justify; "&gt;Section 20 requires controllers who use new technology to assess the risks to the data protection rights that occur from processing. ‘New technology’ is defined to include pre-existing technology that is used anew. Additionally, the reports are required to be sent to the State Privacy Commission periodically. However, there is no clarification on the situations in which such an assessment becomes necessary, or whether all technology must undergo such an assessment before their use. Additionally, the differentiation between different data processing activities based on whether the data processing is incidental or a part of the functioning needs to be clarified. This differentiation is necessary as there are some data processors and controllers who need the data to function; for instance an ecommerce site would require your name and address to deliver the goods, although these sites do not process the data to make decisions. This can be compared to a credit rating agency that is using the data to make decisions as to who will be given a loan based on their creditworthiness. Example can taken from the GDPR, which in Article 35, specifies instances in which a data impact assessment is necessary: where a new technology, that is likely to result in a high risk to the rights of persons, is used; where personal aspects related to natural persons are processed automatically, including profiling; where processing of special categories of data (including data revealing ethnic/racial origin, sexual orientation etc), biometric/genetic data; where data relating to criminal convictions is processed; and with data concerning the monitoring of publicly accessible areas. Additionally, there is no requirement to publish the report, or send it to the supervising authority, but the controller is required to review the processor’s operations to ensure its compliance with the assessment report.&lt;/p&gt;
&lt;p style="text-align: justify; "&gt;&lt;b&gt;Recommendation:&lt;/b&gt; The reports could be sent to a central authority, which according to this Act is the Privacy Commission, along with the State Privacy Commission. Additionally there needs to be a differentiation between the incidental and express use of data. The data processors must be given at least a period of one year after the commencement of the Act to present their impact assessment report. This period is required for the processors to align themselves with the provisions of the Act as well as conduct capacity building initiatives.&lt;/p&gt;
&lt;h2 style="text-align: justify; "&gt;PART C&lt;/h2&gt;
&lt;h2 style="text-align: justify; "&gt;RIGHTS OF A DATA SUBJECT&lt;/h2&gt;
&lt;p style="text-align: justify; "&gt;&lt;b&gt;Section 21: &lt;/b&gt;This section explains the right of the data subject with regard to accessing her data. It states that the data subject has the right to obtain from the data controller information as to whether any personal data concerning her is collected or processed. The data controller also has to not only provide access to such information but also the personal data that has been collected or processed.&lt;/p&gt;
&lt;p style="text-align: justify; "&gt;&lt;b&gt;Comment:&lt;/b&gt; This section does not provide the data subject the right to seek information about security breaches.&lt;/p&gt;
&lt;p style="text-align: justify; "&gt;&lt;b&gt;Recommendation: &lt;/b&gt;This section could state that the data subject has the right to seek information about any security breaches that might have compromised her data (through theft, loss, leaks etc.). This could also include steps taken by the data controller to address the immediate breach as well as steps to minimise the occurrence of such breaches in the future.&lt;a href="#_ftn7"&gt;&lt;sup&gt;&lt;sup&gt;[7]&lt;/sup&gt;&lt;/sup&gt;&lt;/a&gt;&lt;/p&gt;
&lt;h2 style="text-align: justify; "&gt;CHAPTER IV&lt;/h2&gt;
&lt;h2 style="text-align: justify; "&gt;INTERCEPTION AND SURVEILLANCE&lt;/h2&gt;
&lt;p style="text-align: justify; "&gt;&lt;b&gt;Section 28: &lt;/b&gt;This section lists out the special provisions for competent organizations.&lt;/p&gt;
&lt;p style="text-align: justify; "&gt;&lt;b&gt;Comment:&lt;/b&gt; Section 28(1) states ”all provisions of Chapter III shall apply to personal data collected, processed, stored, transferred or disclosed by competent organizations unless when done as per the provisions under this chapter ”.This does not make provisions for other categories of data such as sensitive data.&lt;/p&gt;
&lt;p style="text-align: justify; "&gt;&lt;b&gt;Recommendation:&lt;/b&gt; This section needs to include not just personal data but also sensitive data, in order to ensure that all types of data are protected under this Act.&lt;/p&gt;
&lt;p style="text-align: justify; "&gt;&lt;b&gt;Section 30:&lt;/b&gt; This section states the provisions for prior authorisation by the appropriate Surveillance and Interception Review Tribunal.&lt;/p&gt;
&lt;p style="text-align: justify; "&gt;&lt;b&gt;Comment:&lt;/b&gt; Section 30(5) states “any interception involving the infringement of the privacy of individuals who are not the subject of the intended interception, or where communications relate to &lt;b&gt;medical, journalistic, parliamentary or legally privileged material&lt;/b&gt; may be involved, shall satisfy additional conditions including the provision of specific prior justification in writing to the Office for Surveillance Reform of the Privacy Commission as to the necessity for the interception and the safeguards providing for minimizing the material intercepted to the greatest extent possible and the destruction of all such material that is not strictly necessary to the purpose of the interception.” This section needs to state why these categories of communication are more sensitive than others. Additionally, interceptions typically target people and not topics of communication - thus medical may be part of a conversation between two construction workers and a doctor will communicate about finances.&lt;/p&gt;
&lt;p style="text-align: justify; "&gt;&lt;b&gt;Recommendation:&lt;/b&gt; The section could instead of singling out “medical, journalistic, parliamentary or legally privileged material” state that “any interception involving the infringement of the privacy of individuals who are not the subject of the intended interception may be involved, shall satisfy additional conditions including the provision of specific prior justification in writing to the Office for Surveillance Reform of the Privacy Commission.&lt;/p&gt;
&lt;p style="text-align: justify; "&gt;&lt;b&gt;Section 37&lt;/b&gt;: This section details the bar against surveillance.&lt;/p&gt;
&lt;p style="text-align: justify; "&gt;&lt;b&gt;Comment: &lt;/b&gt;Section 37(1) states that “no person shall order or carry out, or cause or assist the ordering or carrying out of, any surveillance of another person”. The section also prohibits indiscriminate monitoring, or mass surveillance, unless it is necessary and proportionate to the stated purpose. However, it is unclear whether this prohibits surveillance by a resident of their own residential property, which is allowed in Section 5, as the same could also fall within ‘indiscriminate monitoring/mass surveillance’. For instance, in the case of a camera installed in a residential property, which is outward facing, and therefore captures footage of the road/public space.&lt;/p&gt;
&lt;p style="text-align: justify; "&gt;&lt;b&gt;Recommendation:&lt;/b&gt; The Act needs to bring more clarity with regard to surveillance especially with respect to CCTV cameras that are installed in private places, but record public spaces such as public roads. The Act could have provisions that clearly define the use of CCTV cameras in order to ensure that cameras installed in private spaces are not used for carrying out mass surveillance. Further, the Act could address the use of emerging techniques and technology such as facial recognition technologies, that often rely on publicly available data.&lt;/p&gt;
&lt;h2 style="text-align: justify; "&gt;CHAPTER V&lt;/h2&gt;
&lt;h2 style="text-align: justify; "&gt;THE PRIVACY COMMISSION&lt;/h2&gt;
&lt;p style="text-align: justify; "&gt;&lt;b&gt;Section 53:&lt;/b&gt; This section details the powers and functions of the Privacy Commission.&lt;/p&gt;
&lt;p style="text-align: justify; "&gt;&lt;b&gt;Comment:&lt;/b&gt; Section 53(2)(xiv) states that the Privacy Commission shall publish periodic reports “providing description of performance, findings, conclusions or recommendations of any or all of the functions assigned to the Privacy Commission”. However this Section does not make provisions for such reporting to happen annually and to make them publicly available, as well as contain details including financial aspects of matters contained within the Act.&lt;/p&gt;
&lt;p style="text-align: justify; "&gt;&lt;b&gt;Recommendation: &lt;/b&gt;The functions could include a duty to disclose the information regarding the functioning and financial aspects of matters contained within the Act. Categories that could be included in such reports include: the number of data controllers, number of data processors, number of breaches detected and mitigated etc.&lt;/p&gt;
&lt;h2 style="text-align: justify; "&gt;CHAPTER IX&lt;/h2&gt;
&lt;h2 style="text-align: justify; "&gt;OFFENCES AND PENALTIES&lt;/h2&gt;
&lt;p style="text-align: justify; "&gt;&lt;b&gt; Sections 73 to 80:&lt;/b&gt; These sections lay out the different punishments for controlling and processing data in contravention to the provisions of this Act.&lt;/p&gt;
&lt;p style="text-align: justify; "&gt;&lt;b&gt;Comment:&lt;/b&gt; These sections, while laying out different punishments for controlling and processing data in contravention to the provisions of this Act, mets out a fine extending upto Rs. 10 crore. This is problematic as it does not base these penalties on the finer aspects of proportionality, such as  offences that are not as serious as the others.&lt;br /&gt; &lt;br /&gt; &lt;b&gt;Recommendation:&lt;/b&gt; There could be a graded approach to the penalties based on the degree of severity of the offence.This could be in the form of name and shame, warnings and penalties that can be graded based on the degree of the offence. &lt;br /&gt; ----------------------------------------------------------------------&lt;/p&gt;
&lt;p style="text-align: justify; "&gt;Additional thoughts: As India moves to a digital future there is a need for laws to be in place to ensure that individual's rights are not violated. By riding on the push to digitization, and emerging technologies such as AI, a strong all encompassing privacy legislation can allow India to leapfrog and use these emerging technologies for the benefit of the citizens without violating their privacy. A robust legislation can also ensure a level playing field for data driven enterprises within a framework of openness, fairness, accountability and transparency.&lt;/p&gt;
&lt;hr style="text-align: justify; " /&gt;
&lt;p style="text-align: justify; "&gt;&lt;a href="#_ftnref1"&gt;&lt;sup&gt;&lt;sup&gt;[1]&lt;/sup&gt;&lt;/sup&gt;&lt;/a&gt; These seven principles include: Right to Access, Right to Rectification, Right to Erasure And Destruction of Personal Data,Right to Restriction Of Processing, Right to Object, Right to Portability of Personal Data,Right to Seek Exemption from Automated Decision-Making.&lt;/p&gt;
&lt;p style="text-align: justify; "&gt;&lt;a href="#_ftnref2"&gt;&lt;sup&gt;&lt;sup&gt;[2]&lt;/sup&gt;&lt;/sup&gt;&lt;/a&gt;The Privacy (Protection) Bill 2013: A Citizen’s Draft, Bhairav Acharya, Centre for Internet &amp;amp; Society, https://cis-india.org/internet-governance/blog/privacy-protection-bill-2013-citizens-draft&lt;/p&gt;
&lt;p style="text-align: justify; "&gt;&lt;a href="#_ftnref3"&gt;&lt;sup&gt;&lt;sup&gt;[3]&lt;/sup&gt;&lt;/sup&gt;&lt;/a&gt;General Data Protection Regulation, available at https://gdpr-info.eu/art-4-gdpr/.&lt;/p&gt;
&lt;p style="text-align: justify; "&gt;&lt;a href="#_ftnref4"&gt;&lt;sup&gt;&lt;sup&gt;[4]&lt;/sup&gt;&lt;/sup&gt;&lt;/a&gt; Antonio Vetro, Open Data Quality Measurement Framework: Definition and Application to Open Government Data, available at https://www.sciencedirect.com/science/article/pii/S0740624X16300132&lt;/p&gt;
&lt;p style="text-align: justify; "&gt;&lt;a href="#_ftnref5"&gt;&lt;sup&gt;&lt;sup&gt;[5]&lt;/sup&gt;&lt;/sup&gt;&lt;/a&gt; General Data Protection Regulation, available at https://gdpr-info.eu/chapter-5/.&lt;/p&gt;
&lt;p style="text-align: justify; "&gt;&lt;a href="#_ftnref6"&gt;&lt;sup&gt;&lt;sup&gt;[6]&lt;/sup&gt;&lt;/sup&gt;&lt;/a&gt; Sensitive personal data under Section 2(bb) includes, biometric data; deoxyribonucleic acid data;&lt;br /&gt; sexual preferences and practices;medical history and health information;political affiliation;&lt;br /&gt; membership of a political, cultural, social organisations including but not limited to a trade union as defined under Section 2(h) of the Trade Union Act, 1926;ethnicity, religion, race or caste; and&lt;br /&gt; financial and credit information, including financial history and transactions.&lt;/p&gt;
&lt;p style="text-align: justify; "&gt;&lt;a href="#_ftnref7"&gt;&lt;sup&gt;&lt;sup&gt;[7]&lt;/sup&gt;&lt;/sup&gt;&lt;/a&gt; Submission to the Committee of Experts on a Data Protection Framework for India, Amber Sinha, Centre for Internet &amp;amp; Society, available at https://cis-india.org/internet-governance/files/data-protection-submission&lt;/p&gt;
        &lt;p&gt;
        For more details visit &lt;a href='https://cis-india.org/internet-governance/blog/the-centre-for-internet-and-society2019s-comments-and-recommendations-to-the-indian-privacy-code-2018'&gt;https://cis-india.org/internet-governance/blog/the-centre-for-internet-and-society2019s-comments-and-recommendations-to-the-indian-privacy-code-2018&lt;/a&gt;
        &lt;/p&gt;
    </description>
    <dc:publisher>No publisher</dc:publisher>
    <dc:creator>Shweta Mohandas, Elonnai Hickok, Amber Sinha and Shruti Trikanand</dc:creator>
    <dc:rights></dc:rights>

    
        <dc:subject>Aadhaar</dc:subject>
    
    
        <dc:subject>Internet Governance</dc:subject>
    
    
        <dc:subject>Privacy</dc:subject>
    

   <dc:date>2018-07-20T13:55:46Z</dc:date>
   <dc:type>Blog Entry</dc:type>
   </item>


    <item rdf:about="https://cis-india.org/internet-governance/blog/cis-comments-and-feedback-to-digital-personal-data-protection-rules-2025">
    <title>The Centre for Internet and Society’s comments and feedback to the: Digital Personal Data Protection Rules 2025</title>
    <link>https://cis-india.org/internet-governance/blog/cis-comments-and-feedback-to-digital-personal-data-protection-rules-2025</link>
    <description>
        &lt;b&gt;The Centre for Internet &amp;amp; Society (CIS) submitted its comments and feedback to the Digital Personal Data Protection Rules 2025 initiated by the Indian government.&lt;/b&gt;
        &lt;p&gt;&lt;b&gt;&lt;span style="text-decoration: underline;"&gt;Rule 3 - Notice given by data fiduciary to data principal&lt;/span&gt;&lt;/b&gt; - Under Section 5(2) of the DPDP Act, when the personal data of the data principal has been processed before the commencement of the Act, then the data fiduciary is required to give notice to the data principal as soon as reasonably practicable. However, the Rules fail to specify what is meant by reasonably practicable. The timeline for a notice in such circumstances is unclear.&lt;/p&gt;
&lt;ul&gt;
&lt;li&gt;In addition, under Rule 3(a) the phrase “be presented and be understandable independently” is ambiguous. It is not clear whether the consent notice has to be presented independently of any other information or whether it only needs to be independently understandable and can be presented along with other information. &lt;/li&gt;
&lt;li&gt;In addition to this we suggest that the need for “privacy by design” mentioned in the earlier drafts is brought back, with the focus on preventing deceptive design practices (dark patterns)  being used while collecting data. &lt;/li&gt;
&lt;/ul&gt;
&lt;p&gt;&lt;br /&gt;&lt;b&gt;&lt;span style="text-decoration: underline;"&gt;Rule 4 - Registration and obligations of Consent Manager&lt;/span&gt;&lt;/b&gt;- The concept of independent consent managers, similar to account aggregators in the financial sector, and consent manager platforms in the EU is a positive step. However, the Act and the Rules need to flesh out the interplay between the Data Fiduciary and the Consent Managers in a more detailed manner, for example, how does the data fiduciary know if a data principal is using a consent manager, and under what circumstances can the data fiduciary bypass the consent manager, what is the penalty/consequence, etc.&lt;/p&gt;
&lt;p&gt;&lt;b&gt;&lt;span style="text-decoration: underline;"&gt;Rule 6 - Reasonable security safeguards&lt;/span&gt;&lt;/b&gt; - While we appreciate the guidance provided in terms of the measures for security such as “encryption, obfuscation or masking or the use of virtual tokens”, it would also be good to refer to the SPDI Rules and include the example of the International Standard IS/ISO/IEC 27001 on Information Technology - Security Techniques - Information Security Management System as an illustration to guide data fiduciaries.&lt;/p&gt;
&lt;p&gt;&lt;b&gt;&lt;span style="text-decoration: underline;"&gt;Rule 7 - Intimation of personal data breach&lt;/span&gt;&lt;/b&gt; - As per the Rules, the data fiduciary on becoming aware of any personal data breach is required to notify the data principal and the Data Protection Board without delay; a plain reading of this Rule suggests that data fiduciary has to report the breach almost immediately, and this could be a practical challenge. Further, the absence of any threshold (materiality, gravity of the breach, etc) for notifying the data principal means that the data fiduciary will have to inform the data principal about even an isolated data breach which may not have an impact on the data principal. In this context, we recommend the Rule be amended to state that the data fiduciary should be required to inform the Data Protection Board about every data breach, however the data principal should be informed depending on the gravity and materiality of the breach and when it is likely to result in high risk to the data principal.&lt;/p&gt;
&lt;ul&gt;
&lt;li&gt;Whilst the Rules have provisions for intimation of data breach, there is no specific provision requiring the Data Fiduciary to take all steps necessary to ensure that the Data Fiduciary has taken all necessary measures to mitigate the risk arising out of the said breach. Although there is an obligation to report any such measures to the Data Principal (Rule 7(1)(c)) as well as to the DPBI (Rule 7(2)(b)(iii)), there is no positive obligation imposed on the Data Fiduciary to take any such mitigation measures. The Rules and the Act merely presume that the Data Fiduciary would take mitigation measures, perhaps that is the reason why there are notification requirements for such breach, however the Rules and the Act do not put any positive obligation on the Data Fiduciary to actually implement such measures. This would lead to a situation where a Data Fiduciary may not take any measures to mitigate the risks arising out of the data breach, and be in compliance with its legal obligations by merely notifying the Data Principal as well as the DPBI that no measures have been taken to mitigate the risks arising from the data breach. In addition, the SPDI Rules state that in an event of a breach the body corporate is required to demonstrate that they had implemented reasonable security standards. This provision could be incorporated in this Rule to emphasize on the need to implement robust security standards which is one of the ways to curb data breaches from happening, and ensure that there is a protocol to mitigate the breach.&lt;/li&gt;
&lt;/ul&gt;
&lt;p&gt;&lt;b&gt;&lt;span style="text-decoration: underline;"&gt;Rule 10 - Verifiable consent for processing of personal data of child or of person with disability who has a lawful guardian&lt;/span&gt;&lt;/b&gt; - The two mechanisms provided under the Rules to verify the age and identity of parents pre-suppose a high degree of digital literacy on the part of the parents. They may either give or refuse consent without thinking too much about the consequences arising out of giving or not giving consent. As there is always a risk of individuals not providing the correct information regarding their age or their relationship with the child, platforms may have to verify every user’s age; thereby preventing users from accessing the platform anonymously. Further, there is also a risk of data maximisation of personal data rather than data minimisation; i.e. parents may be required to provide far more information than required to prove their identity. One recommendation/suggestion that we propose is to remove the processing of children's personal data from the ambit of this law, and instead create a separate standalone legislation dealing with children’s digital rights. Another important issue to highlight here is the importance of the Digital Protection Board and its capacity to levy fines and impose strictures on the platforms. We have seen from examples from other countries that platforms are forced to redesign and provide for better privacy and data protection mechanisms when the regulator steps in and imposes high penalties.&lt;/p&gt;
&lt;p&gt;&lt;b&gt;&lt;span style="text-decoration: underline;"&gt;Rule 12 - Additional obligations of Significant Data Fiduciary&lt;/span&gt;&lt;/b&gt; - The Rules do not clarify which entities will be considered as a Significant Data Fiduciary, leaving that to the government notifications. This creates uncertainty for data fiduciaries, especially smaller organisations that might not be able to set up the mechanisms and people for conducting data protection impact assessment, and auditing. The Rule provides that SDFs will have to conduct an annual Data Protection Impact Assessment. While this is a step in the right direction, the Rules are currently silent on the granularity of the DPIA. Similarly for “audit” the Rules do not clarify what type of audit is needed and what the parameters are. It is therefore imperative that the government notifies the level of details that the DPIA and the audit need to go into in order to ensure that the SDFs actually address issues where their data governance practices are lacking and not use the DPIA as a whitewashing tactic. There is also a need to reduce some of the ambiguity with regards to the parameters, and responsibilities in order to make it easier for startups and smaller players to comply with the regulations. In addition, while there is a need to protect data and increase responsibility on organisations collecting sensitive data or large volumes of data, there is a need to look beyond compliance and look at ways that preserve the rights of the data principal. Hence significant data fiduciaries should also be given the added responsibility of collecting explicit consent from the data principal, and also have easier access for correction of data, grievance redressal and withdrawal of consent.&lt;/p&gt;
&lt;p&gt;&lt;b&gt;&lt;span style="text-decoration: underline;"&gt;Rule 14 - Processing of personal data outside India&lt;/span&gt;&lt;/b&gt; - As per section 16 of the Act the government could, by notification, restrict the transfer of data to specific countries as notified. This system of a negative list envisaged under the Act appears to have been diluted somewhat by the use of the phrase “any foreign State” under the Rules. This ambiguity should be addressed and the language in the Rules may be altered to bring it in line with the Act. Further, the rules also appear to be ultra vires the Act. As per the DPDP Act, personal data could be shared outside India, except to countries which were on the negative list, however, the dilution of the provision through the rules appears to have now created a white list of countries; i.e. permissible list of countries to which data can be transferred.&lt;/p&gt;
&lt;p&gt;&lt;b&gt;&lt;span style="text-decoration: underline;"&gt;Rule 15 Exemption from Act for research, archiving or statistical purposes&lt;/span&gt;- &lt;/b&gt;While creating an exception for research and statistical purposes is an understandable objective, the current wording of the provision is vague and subject to mischief. The objective behind the provision is to ensure that research activities are not hindered due to the requirements of taking consent, etc. as required under the Act. However the way the provision is currently drafted, it could be argued that a research lab or a research centre established by a large company, for e.g. Google, Meta, etc. could also seek exemptions from the provisions of this Act for conducting “research”. The research conducted may not be shared with the public in general and may be used by the companies that funded/established the research centre. Therefore there should be further conditions attached to this provision, that would keep such research centers outside the purview of the exemption. Conditions such as making the results of the research publicly available, public interest, etc. could be considered for this purpose.&lt;/p&gt;
&lt;p&gt;&lt;b&gt;&lt;span style="text-decoration: underline;"&gt;Rule 22 - Calling for Information from data fiduciary or intermediary&lt;/span&gt; - &lt;/b&gt;This rule read with the seventh schedule appears to dilute the data minimisation and purpose limitation provisions provided for in the Act. The wide ambit of powers appears to be in contravention of the Supreme Court judgement in the Puttaswamy case, which places certain restrictions on the government while collecting personal data. This “omnibus” provision flouts guardrails like necessity and proportionality that are important to safeguard the fundamental right to privacy.&lt;/p&gt;
&lt;p&gt;It should be clarified whether this rule is merely an enabling provision to facilitate sharing of information, and only designated competent authorities as per law can avail of this provision. &lt;span style="text-decoration: underline;"&gt;Need for Confidentiality &lt;/span&gt;&lt;/p&gt;
&lt;p&gt;Additionally, the rule mandates that the government may “require the Data Fiduciary or intermediary to not disclose” any request for information made under the Act. There is no requirement of confidentiality indicated in the governing section, i.e. section 36, from which Rule 22 derives its authority. Talking about the avoidance of secrecy in government business, the Supreme Court in the State of U.P. v. Raj Narain, (1975) 4 SCC 428 has held that &lt;br /&gt; &lt;i&gt;“In a government of responsibility like ours, where all the agents of the public must be responsible for their conduct, there can be but few secrets. The people of this country have a right to know every public act, everything, that is done in a public way, by their public functionaries. They are entitled to know the particulars of every public transaction in all its bearing. The right to know, which is derived from the concept of freedom of speech, though not absolute, is a factor which should make one wary, when secrecy is claimed for transactions which can, at any rate, have no repercussions on public security (2). To cover with [a] veil [of] secrecy the common routine business, is not in the interest of the public. Such secrecy can seldom be legitimately desired. It is generally desired for the purpose of parties and politics or personal self-interest or bureaucratic routine. The responsibility of officials to explain and to justify their acts is the chief safeguard against oppression and corruption.” &lt;/i&gt;&lt;br /&gt; In order to ensure that state interests are also protected, there may be an enabling provision whereby in certain instances confidentiality may be maintained, but there has to be a supervisory mechanism whereby such action may be judged on the anvil of legal propriety.&lt;/p&gt;
        &lt;p&gt;
        For more details visit &lt;a href='https://cis-india.org/internet-governance/blog/cis-comments-and-feedback-to-digital-personal-data-protection-rules-2025'&gt;https://cis-india.org/internet-governance/blog/cis-comments-and-feedback-to-digital-personal-data-protection-rules-2025&lt;/a&gt;
        &lt;/p&gt;
    </description>
    <dc:publisher>No publisher</dc:publisher>
    <dc:creator>Pallavi Bedi, Vipul Kharbanda, Shweta Mohandas, Anubha Sinha and Isha Suri</dc:creator>
    <dc:rights></dc:rights>

    
        <dc:subject>Privacy</dc:subject>
    
    
        <dc:subject>Internet Governance</dc:subject>
    
    
        <dc:subject>Data Governance</dc:subject>
    
    
        <dc:subject>Data Protection</dc:subject>
    
    
        <dc:subject>Data Management</dc:subject>
    

   <dc:date>2025-03-06T02:06:44Z</dc:date>
   <dc:type>Blog Entry</dc:type>
   </item>


    <item rdf:about="https://cis-india.org/internet-governance/blog/">
    <title>[···]</title>
    <link>https://cis-india.org/internet-governance/blog/</link>
    <description>
        &lt;b&gt;&lt;/b&gt;
        
        &lt;p&gt;
        For more details visit &lt;a href='https://cis-india.org/internet-governance/blog/'&gt;https://cis-india.org/internet-governance/blog/&lt;/a&gt;
        &lt;/p&gt;
    </description>
    <dc:publisher>No publisher</dc:publisher>
    <dc:creator>kaeru</dc:creator>
    <dc:rights></dc:rights>


   <dc:date>2025-11-19T17:19:28Z</dc:date>
   <dc:type>News Item</dc:type>
   </item>


    <item rdf:about="https://cis-india.org/internet-governance/cis-joins-gni">
    <title>The Centre for Internet &amp; Society Joins the Global Network Initiative</title>
    <link>https://cis-india.org/internet-governance/cis-joins-gni</link>
    <description>
        &lt;b&gt;The Global Network Initiative (GNI) is pleased to announce its newest member, the Centre for Internet &amp;amp; Society based in Bangalore, India. A technology policy research institute, CIS brings to GNI in-depth expertise on global Internet governance as well as online freedom of expression and privacy in India.&lt;/b&gt;
        
&lt;p&gt;"We are delighted to add our first member based in India and welcome CIS’s engagement in support of transparency and accountability in technology," says GNI Executive Director Susan Morgan. "GNI's Principles for responsible company behavior apply globally, but require an appreciation of unique local contexts if they are to take hold. CIS will provide invaluable insight as we consider opportunities to work with India's burgeoning ICT industry."&lt;/p&gt;
&lt;p&gt;"India’s ICT sector is one of the most dynamic worldwide, " says CIS Executive Director Sunil Abraham, "but rapid technological advances have raised anxieties around issues including hate speech, political criticism, and obscene content at a time when Indian institutions for the protection of free expression are under strain. We look forward to working with GNI's member organizations on these challenging issues."&lt;/p&gt;
&lt;p&gt;CIS is an independent, non-profit research organization involved in research on the emerging field of the Internet and its relationship to society. CIS brings together scholars, academics, students, programmers and scientists to engage in a large variety of Internet issues. CIS also runs different academic and research programs and is receptive to new ideas and collaborations, projects and campaigns for the public.&lt;/p&gt;
&lt;p&gt;Leslie Harris, GNI Board Member and President and CEO of the Center for Democracy and Technology says: "The addition of CIS not only increases GNI’s global reach, it significantly enhances the initiative’s capacity around shared learning and policy engagement, not just in India, but on internet policy around the world."&lt;/p&gt;
&lt;p&gt;&lt;a class="external-link" href="http://www.globalnetworkinitiative.org/newsandevents/CIS_Joins.php"&gt;Click to read the original published on the Global Network Initiative website&lt;/a&gt;.&lt;/p&gt;

        &lt;p&gt;
        For more details visit &lt;a href='https://cis-india.org/internet-governance/cis-joins-gni'&gt;https://cis-india.org/internet-governance/cis-joins-gni&lt;/a&gt;
        &lt;/p&gt;
    </description>
    <dc:publisher>No publisher</dc:publisher>
    <dc:creator>praskrishna</dc:creator>
    <dc:rights></dc:rights>

    
        <dc:subject>Freedom of Speech and Expression</dc:subject>
    
    
        <dc:subject>Internet Governance</dc:subject>
    
    
        <dc:subject>Privacy</dc:subject>
    

   <dc:date>2012-04-25T09:13:50Z</dc:date>
   <dc:type>Blog Entry</dc:type>
   </item>


    <item rdf:about="https://cis-india.org/internet-governance/blog/central-monitoring-system-questions-to-be-asked-in-parliament">
    <title>The Central Monitoring System: Some Questions to be Raised in Parliament</title>
    <link>https://cis-india.org/internet-governance/blog/central-monitoring-system-questions-to-be-asked-in-parliament</link>
    <description>
        &lt;b&gt;The following are some model questions to be raised in the Parliament regarding the lack of transparency in the central monitoring system.&lt;/b&gt;
        &lt;p&gt;&lt;b&gt;&lt;span style="text-decoration: underline;"&gt;Preliminary&lt;/span&gt;&lt;/b&gt;&lt;/p&gt;
&lt;ul&gt;
&lt;li style="text-align: justify; "&gt;The Central Monitoring System (CMS) is a Central Government project to intercept communications, both voice and data, that is transmitted via telephones and the internet to, from and within India. Owing to the vast nature of this enterprise, the CMS cannot be succinctly described and the many issues surrounding this project are diverse. This Issue Brief will outline preliminary constitutional, legal and technical concerns that are presented by the CMS.&lt;/li&gt;
&lt;li style="text-align: justify; "&gt;At the outset, it must be clearly understood that no public documentation exists to explain the scope, functions and technical architecture of the CMS. This lack of transparency is the single-largest obstacle to understanding the Central Government’s motives in conceptualising and operationalizing the CMS. This lack of public documentation is also the chief reason for the brevity of this Issue Note. Without making public the policy, law and technical abilities of the CMS, there cannot be an informed national debate on the primary concerns posed by the CMS, i.e the extent of envisaged state surveillance upon Indian citizens and the safeguards, if any, to protect the individual right to privacy. &lt;/li&gt;
&lt;/ul&gt;
&lt;p&gt;&lt;b&gt;&lt;span style="text-decoration: underline;"&gt;Surveillance and Privacy&lt;/span&gt;&lt;/b&gt;&lt;/p&gt;
&lt;ul&gt;
&lt;li style="text-align: justify; "&gt;Surveillance is necessary to secure political organisation. Modern nation-states, which are theoretically organised on the basis of shared national and societal characteristics, require surveillance to detect threats to these characteristics. In democratic societies, beyond the immediate requirements of national integrity and security, surveillance must be targeted at securing the safety and rights of individual citizens. This Issue Brief does not dispute the fact that democratic countries, such as India, should conduct surveillance to secure legitimate ends. Concerns, however, arise when surveillance is conducted in a manner unrestricted and unregulated by law; these concerns are compounded when a lack of law is accompanied by a lack of transparency.&lt;/li&gt;
&lt;li style="text-align: justify; "&gt;Technological advancement leads to more intrusive surveillance. The evolution of surveillance in the United States resulted, in 1967, in the first judicial recognition of the right to privacy. In &lt;i&gt;Katz&lt;/i&gt; v. &lt;i&gt;United States&lt;/i&gt; the US Supreme Court ruled that the privacy of communications had to be balanced with the need to conduct surveillance; and, therefore, wiretaps had to be warranted, judicially sanctioned and supported by probable cause. &lt;i&gt;Katz&lt;/i&gt; expanded the scope of the Fourth Amendment of the US Constitution, which protected against unreasonable searches and seizures. Most subsequent US legal developments relating to the privacy of communications from surveillance originate in the &lt;i&gt;Katz&lt;/i&gt; judgement. Other common law countries, such as the United Kingdom and Canada, have experienced similar judicial evolution to recognise that the right to privacy must be balanced with governance.&lt;/li&gt;
&lt;/ul&gt;
&lt;p&gt;&lt;b&gt;&lt;span style="text-decoration: underline;"&gt;&lt;br /&gt;Right to Privacy in India&lt;/span&gt;&lt;/b&gt;&lt;/p&gt;
&lt;ul&gt;
&lt;li style="text-align: justify; "&gt;Unfortunately, India does not have a persuasive jurisprudence of privacy protection. In the &lt;i&gt;Kharak Singh&lt;/i&gt; (1964) and &lt;i&gt;Gobind&lt;/i&gt; (1975) cases, the Supreme Court of India considered the question of privacy from physical surveillance by the police in and around the homes of suspects. In the latter case, the Supreme Court found that some of the Fundamental Rights “could be described as contributing to the right to privacy” which was nevertheless subject to a compelling public interest. This insipid inference held the field until 1994 when, in the &lt;i&gt;Rajagopal&lt;/i&gt; (“Auto Shankar”, 1994) case, the Supreme Court, for the first time, directly located privacy within the ambit of the right to personal liberty recognised by Article 21 of the Constitution. However, &lt;i&gt;Rajagopal&lt;/i&gt; dealt specifically with the publication of an autobiography, it did not consider the privacy of communications. In 1997, the Supreme Court considered the question of wiretaps in the &lt;i&gt;PUCL&lt;/i&gt; case. While finding that wiretaps invaded the privacy of communications, it continued to permit them subject to some procedural safeguards which continue to be routinely ignored. A more robust statement of the right to privacy was made recently by the Delhi High Court in the &lt;i&gt;Naz &lt;/i&gt;&lt;i&gt;Foundation&lt;/i&gt; case (2011) that de-criminalised consensual homosexual acts; however, this judgment has been appealed to the Supreme Court.&lt;/li&gt;
&lt;/ul&gt;
&lt;p&gt;&lt;b&gt;&lt;span style="text-decoration: underline;"&gt;Issues Pertaining to the CMS&lt;/span&gt;&lt;/b&gt;&lt;/p&gt;
&lt;ul&gt;
&lt;li style="text-align: justify; "&gt;While judicial protection from physical surveillance was cursorily dealt with in the &lt;i&gt;Kharak Singh&lt;/i&gt; and &lt;i&gt;Gobind&lt;/i&gt; cases, the Supreme Court of India directly considered the issue of wiretaps in the &lt;i&gt;PUCL&lt;/i&gt; case. Wiretaps in India primarily occur on the strength of powers granted to certain authorities under section 5(2) of the Indian Telegraph Act, 1885. The Court found that the Telegraph Act, and Rules made thereunder, did not prescribe adequate procedural safeguards to create a “just and fair” mechanism to conduct wiretaps. Therefore, it laid down the following procedure to conduct wiretaps: &lt;/li&gt;
&lt;/ul&gt;
&lt;ul&gt;
&lt;/ul&gt;
&lt;p style="text-align: justify; "&gt;(a) the order should be issued by the relevant Home Secretary (this power is delegable to a Joint Secretary),&lt;br /&gt; (b) the interception must be carried out exactly in terms of the order and not in excess of it,&lt;br /&gt; (c) a determination of whether the information could be reasonably secured by other means,&lt;br /&gt; (d) the interception shall cease after sixty (60) days.&lt;/p&gt;
&lt;ul&gt;
&lt;/ul&gt;
&lt;ul&gt;
&lt;li style="text-align: justify; "&gt;Therefore, prima facie, any voice interception conducted through the CMS will be in violation of this Supreme Court judgement. The CMS will enforce blanket surveillance upon the entire country without regard for reasonable cause or necessity. This movement away from targeted surveillance to blanket surveillance without cause, conducted without statutory sanction and without transparency, is worrying.&lt;/li&gt;
&lt;li style="text-align: justify; "&gt;Accordingly, the following questions may be raised, in Parliament, to learn more about the CMS project: &lt;/li&gt;
&lt;/ul&gt;
&lt;ol&gt;
&lt;li&gt;Which statutes, Government Orders, notifications etc deal with the establishment and maintenance of the CMS?&lt;/li&gt;
&lt;li&gt;Which is the nodal agency in charge of implementing the CMS?&lt;/li&gt;
&lt;li&gt;What are the powers and functions of the nodal agency?&lt;/li&gt;
&lt;li&gt;What guarantees exist to protect ordinary Indian citizens from intrusive surveillance without cause?&lt;/li&gt;
&lt;li&gt;What are the technical parameters of the CMS?&lt;/li&gt;
&lt;li&gt;What are the consequences for misuse or abuse of powers by any person working in the CMS project?&lt;/li&gt;
&lt;li&gt;What recourse is available to Indian citizens against whom there is unnecessary surveillance or against whom there has been a misuse or abuse of power?&lt;/li&gt;
&lt;/ol&gt;
        &lt;p&gt;
        For more details visit &lt;a href='https://cis-india.org/internet-governance/blog/central-monitoring-system-questions-to-be-asked-in-parliament'&gt;https://cis-india.org/internet-governance/blog/central-monitoring-system-questions-to-be-asked-in-parliament&lt;/a&gt;
        &lt;/p&gt;
    </description>
    <dc:publisher>No publisher</dc:publisher>
    <dc:creator>bhairav</dc:creator>
    <dc:rights></dc:rights>

    
        <dc:subject>Central Monitoring System</dc:subject>
    
    
        <dc:subject>Internet Governance</dc:subject>
    
    
        <dc:subject>Privacy</dc:subject>
    

   <dc:date>2013-09-25T10:30:10Z</dc:date>
   <dc:type>Blog Entry</dc:type>
   </item>


    <item rdf:about="https://cis-india.org/internet-governance/news/hindu-businessline-april-3-2015-sibi-arasu-the-block-heads">
    <title>The block heads</title>
    <link>https://cis-india.org/internet-governance/news/hindu-businessline-april-3-2015-sibi-arasu-the-block-heads</link>
    <description>
        &lt;b&gt;An entire government department is on the job, but can it really take down ‘offending’ online content?&lt;/b&gt;
        &lt;p class="body" style="text-align: justify; "&gt;The article by Sibi Arasu was &lt;a class="external-link" href="http://www.thehindubusinessline.com/features/blink/know/bl-ink-the-task-of-blocking-and-unblocking-websites/article7064563.ece"&gt;published in the Hindu Businessline&lt;/a&gt; on April 3, 2015. Sunil Abraham gave his inputs.&lt;/p&gt;
&lt;hr /&gt;
&lt;p class="body" style="text-align: justify; "&gt;The Department of Electronics and Information Technology’s (Deity)  offices are as layered as its official website. From inside ‘Electronics  Niketan’ at the Central Government Offices (CGO) complex in south  Delhi, Deity’s army of director-generals, joint secretaries, department  heads, scientists, clerks and staff of various grades and ranks keep an  eye on how the country engages with the world wide web.&lt;/p&gt;
&lt;p class="body" style="text-align: justify; "&gt;One set of cubicles is dedicated to the Computer Emergency Response Team  (CERT), the nodal agency meant to combat hacking, phishing and  generally fortify the internet in India. This includes the task of  blocking and unblocking websites. A rather complicated job in a country  where, according to one senior government official, “it’s technically  infeasible to completely block content. If it’s at the gateway level,  then we can filter it out. But for videos and other similar content, it  is just not possible to completely block them.”&lt;/p&gt;
&lt;p class="body" style="text-align: justify; "&gt;&lt;b&gt;No bandwidth&lt;/b&gt;&lt;/p&gt;
&lt;p class="body" style="text-align: justify; "&gt;Be it the AIB roasts that were taken down from YouTube or the  controversial documentary India’s Daughter, which was blocked within  eight hours of going online, the CERT and other allied departments have  been kept busy over the past few months.&lt;/p&gt;
&lt;p class="body" style="text-align: justify; "&gt;In a classic example of how blocking can go wrong, more than 36 websites  were taken down in December last year to “prevent the spread of ISIS  propaganda” only to be unblocked within weeks. Like elsewhere in the  world, the attempt to “protect” citizens had unwittingly ended up  hurting legitimate websites, including video sharing sites vimeo.com,  dailymotion.com and the reference site archive.org. It was  embarrassingly similar to the Chinese government’s actions in 2010 when  it blocked all images of empty chairs, stools and tables as it attempted  to staunch discussions about Liu Xiaobo, the Nobel Peace Prize winner  that year, who was missing from the awards ceremony as he was  incarcerated in China.&lt;/p&gt;
&lt;p class="body" style="text-align: justify; "&gt;Terming such government actions as dangerous and Orwellian, Apar Gupta, a  cyber law specialist in Delhi who appeared for the People’s Union for  Civil Liberties (PUCL) in the PIL against Section 66A of the IT Act,  says, “Any piece of content is contained within several file formats and  obscured through technical devices like encryption, making its complete  removal and eradication impossible.”&lt;/p&gt;
&lt;p class="body" style="text-align: justify; "&gt;Internet freedom campaigners have maintained that Section 66A, which  prescribed “punishment for sending offensive messages through a  communication service”, was created solely to muzzle dissent and  differences of opinion.&lt;/p&gt;
&lt;p class="body" style="text-align: justify; "&gt;Although Section 66A was recently struck down, the law authorising  blocking of content — namely, Section 69A — remains intact. The Central  Government can block content it believes threatens the security of the  State; the sovereignty, integrity or defence of India; friendly  relations with foreign States; public order; or incites committing a  cognisable offence related to any of the above. The government must,  however, adhere to a set of procedures and safeguards, known as Blocking  Rules.&lt;/p&gt;
&lt;p class="body" style="text-align: justify; "&gt;“Larger, overbroad technical blocks can impede the functioning of the  internet,” says internet policy analyst Raman Chima. “When a large  website ‘blacklist’ and internet filter was proposed for Australia in  2009-10, research established that it would likely result in  double-digit reductions in the internet’s speed and efficiency in that  country.”&lt;/p&gt;
&lt;p class="body" style="text-align: justify; "&gt;The ‘Streisand effect’, named after the Hollywood actress, is another  common consequence of blocking. As Chima says, “Specific bans tend to be  counterproductive and, more often than not, result in more awareness  and interest in the banned content.”&lt;/p&gt;
&lt;p class="body" style="text-align: justify; "&gt;&lt;b&gt;Political manoeuvres&lt;/b&gt;&lt;/p&gt;
&lt;p class="body" style="text-align: justify; "&gt;‘Ethical hacktivist’ and Hackers Hat founder Satish Ashwin sees banning and blocking as purely vote bank politics.&lt;/p&gt;
&lt;p class="body" style="text-align: justify; "&gt;“Technically anything can be blocked or banned and it’s not a big deal,  but the sheer volume of data uploaded makes it next to impossible to  monitor and censor,” he says.&lt;/p&gt;
&lt;p class="body" style="text-align: justify; "&gt;To those heralding the striking down of 66A as a victory for free  speech, Sunil Abraham, executive director of the Bengaluru-based Centre  for Internet and Society, points to the larger picture. “Nobody is  really aware of the scale of censorship in India. Thousands of websites  are blocked under Section 69A, mostly due to the maximalist enforcement  of Intellectual Property Rights (IPR). While 66A is gone, there are many  other provisions within the IT Act that still regulate speech online.  It is important to have quality laws drafted through an open,  participatory process, where all stakeholders are consulted and  responded to before bills are introduced in Parliament."&lt;/p&gt;
        &lt;p&gt;
        For more details visit &lt;a href='https://cis-india.org/internet-governance/news/hindu-businessline-april-3-2015-sibi-arasu-the-block-heads'&gt;https://cis-india.org/internet-governance/news/hindu-businessline-april-3-2015-sibi-arasu-the-block-heads&lt;/a&gt;
        &lt;/p&gt;
    </description>
    <dc:publisher>No publisher</dc:publisher>
    <dc:creator>praskrishna</dc:creator>
    <dc:rights></dc:rights>

    
        <dc:subject>Internet Governance</dc:subject>
    
    
        <dc:subject>Chilling Effect</dc:subject>
    
    
        <dc:subject>Censorship</dc:subject>
    

   <dc:date>2015-05-07T11:51:48Z</dc:date>
   <dc:type>News Item</dc:type>
   </item>


    <item rdf:about="https://cis-india.org/internet-governance/news/factor-daily-anand-murali-august-13-2018-the-big-eye">
    <title>The Big Eye: The tech is all ready for mass surveillance in India</title>
    <link>https://cis-india.org/internet-governance/news/factor-daily-anand-murali-august-13-2018-the-big-eye</link>
    <description>
        &lt;b&gt;Chennai’s T. Nagar, arguably India’s biggest shopping district by revenues and crowded on any given day, gets even more packed in festival seasons as thousands throng its saree and jewellery stores.&lt;/b&gt;
        &lt;p style="text-align: justify; "&gt;The blog post by Anand Murali was published in &lt;a class="external-link" href="https://factordaily.com/face-recognition-mass-surveillance-in-india/"&gt;Factor Daily&lt;/a&gt; on August 13, 2018. Sunil Abraham was quoted.&lt;/p&gt;
&lt;hr /&gt;
&lt;p style="text-align: justify; "&gt;Every year, Deepavali, less than three months away this year, presents the perfect hunting ground for pickpockets and other petty thieves — and a headache for the local police.&lt;/p&gt;
&lt;p style="text-align: justify; "&gt;This time, however, the city police have reason to believe it has a  handle on things. It has a technology that analyses CCTV footage to  spot, in real time, people with a criminal history visiting the T. Nagar  area. “We are matching real-time CCTV video footage with our criminal  database using the FaceTagr system and if any criminals are identified  in that area, we get an immediate alert and we can further investigate,”  says P Aravindan, deputy commissioner of police. Last year, FaceTagr, a  face recognition software developed by an eponymous Chennai company,  was used in a few areas with results that convinced the police to spread  it to all of the T Nagar area, he adds.&lt;/p&gt;
&lt;p style="text-align: justify; "&gt;Aravindan’s counterparts in Punjab are as big fans of real-time  surveillance as him. Amritsar Police used something the state’s police  calls Punjab Artificial Intelligence System, or PAIS, developed by  Gurugram AI company Staqu Technologies, to solve a murder case within 24  hours — again, using CCTV footage and facial recognition technology.  The company has &lt;a href="https://tech.economictimes.indiatimes.com/news/startups/staqu-builds-an-android-smart-glass-platform-to-help-police-identify-criminals/63239706" rel="noopener nofollow external noreferrer" target="_blank"&gt;piloted&lt;/a&gt; a camera mounted on a pair of smart glasses to capture a real-time feed and analyse it for facial matches with a database.&lt;/p&gt;
&lt;p style="text-align: justify; "&gt;Elsewhere, the Surat Police has a picture intelligence unit that  relies on NEC’s proprietary NeoFace technology for facial recognition,  as also vehicle number plate recognition, to &lt;a href="https://in.nec.com/en_IN/press/201507/global_20150719_2.html" rel="noopener nofollow external noreferrer" target="_blank"&gt;track persons of interest&lt;/a&gt;.  The result is alerts that the police can proactively act upon and  faster turnaround in solving cases. Surat can claim to be a step ahead  of Tokyo: NEC plans to use the latest version of its NeoFace technology  at the 2020 Tokyo Olympics to &lt;a href="https://www.sunherald.com/news/business/article216218290.html" rel="noopener nofollow external noreferrer" target="_blank"&gt;track accredited persons&lt;/a&gt; – athletes, officials, media, and others – at multiple venues.&lt;/p&gt;
&lt;p style="text-align: justify; "&gt;Welcome to the Big Eye helping law keepers and administrators in  India to instantly recognise faces and use the information in multiple  use cases.&lt;/p&gt;
&lt;p style="text-align: justify; "&gt;Facial recognition and image cognition tech is nothing new, to be  sure. We have seen them in movies for some time now – be it the Jason  Bourne series in which the CIA uses complex surveillance tech to track  the agent or the &lt;i&gt;Mission Impossible&lt;/i&gt; movies where the protagonist use facial recognition to get access to secure areas. Or, the recent Steven Spielberg movie, &lt;i&gt;Ready Player One&lt;/i&gt;,  in which the villain uses camera drones. This kind of advanced – and  even futuristic – image recognition-based surveillance all set to go  mainstream in India with the rapid proliferation of cameras: from the  public and private CCTVs to the ubiquitous mobile phone cameras.&lt;/p&gt;
&lt;h2 style="text-align: justify; "&gt;Investigation on steroids&lt;/h2&gt;
&lt;p style="text-align: justify; "&gt;Chennai-based FaceTagr has been working with Indian Railways since  last year to prevent human trafficking. “Finding missing children and  the prevention of human trafficking was one of the first use cases that  we developed. We work with the Indian Railways, state police  departments, and CBI to prevent human trafficking,” says Vijay  Gnanadesikan, CEO and co-founder, FaceTagr.&lt;/p&gt;
&lt;p style="text-align: justify; "&gt;His moment of epiphany that led to the idea for developing FaceTagr  was on a morning drive to work in Chennai traffic and watching children  begging at his window. “I reached the office and discussed with my  cofounder. We realised that there is an existing database of missing  children with photographs and, with face recognition technology, we  could develop a solution that could help solve the problem and in a way  also prevent human trafficking,” says Gnanadesikan. Cut to today: the  tool has been deployed at the India-Nepal and India-Bangladesh borders  at nearly 24 checkpoints to monitor human trafficking.&lt;/p&gt;
&lt;p style="text-align: justify; "&gt;FaceTagr is a face recognition technology that works on both static  images and video footage. The same technology is being used in a  solution for the Chennai police to identify criminals. “Earlier a  suspect had to be taken to the police station, fingerprinted, and then  his details were verified. Imagine a guy walking on the road at 2 am who  is looking suspicious. A police patrol can take the suspect’s  photograph with our app and, within a second, receive details about his  crime history,” says Gnanadesikan.&lt;/p&gt;
&lt;p style="text-align: justify; "&gt;The T. Nagar deployment runs on real-time CCTV footage. In the areas  it was deployed last year, the system helped reduce the number of crimes  “from three digits to a single digit” during last year’s Deepavali  season, claims the FaceTagr CEO.&lt;/p&gt;
&lt;p style="text-align: justify; "&gt;The system compares the real-time CCTV footage of the crowd with the  police criminal database for facial matches. “Once someone from the  database is identified among the crowd, the picture shows up, which is  then re-verified by the police personnel monitoring the system for a  reconfirmation,” says Gnanadesikan, adding that an ID match does not  mean a crime is committed. “Someone might also be there for shopping and  we and the police team are very mindful of that, but it will give the  police a notification about the person’s whereabouts in the area.”&lt;/p&gt;
&lt;p style="text-align: justify; "&gt;One of the clever outcomes of the deployment is that the system helps  identify criminals from other cities or areas. According to DCP  Aravindan, a police officer in Chennai city will likely not know of a  criminal from, say, Tirunelveli, Kanyakumari or other far off places.  This is where the face recognition system comes in handy, he says.&lt;/p&gt;
&lt;p style="text-align: justify; "&gt;“Traditionally, we have data of all criminals station-wise and there  is also a crime team which is familiar with the criminals and can  recognise them. But, of late, with the improvement in connectivity and  communication, people from far-off places come and commit a crime and  this has made it challenging to identify them,” he says. The state’s  crime database currently has over 60,000 photographs with more  photographs being added daily. Every week, the department nabs two or  three criminals with the help of the face recognition system, Aravindan  adds.&lt;/p&gt;
&lt;p style="text-align: justify; "&gt;Are there any privacy concerns? “To avoid misuse we have conducted  multiple training programs for all the police personnel who are using  this application and we have instructed them that unless they find a  person suspicious, they should not take a photograph. We have designed  an SOP (standard operating procedure) for using the system to avoid  misuse,” adds the deputy commissioner.&lt;/p&gt;
&lt;h2 style="text-align: justify; "&gt;Surveillance on smart glass&lt;/h2&gt;
&lt;p style="text-align: justify; "&gt;The face recognition system of Staqu, the Gurgaon AI startup, has  been deployed in the states of Uttarakhand, Punjab and Rajasthan.&lt;/p&gt;
&lt;p style="text-align: justify; "&gt;According to Atul Rai, Staqu’s CEO and co-founder, different law  enforcement jurisdictions or agencies, even within a state, often have  their own sets of data and it becomes difficult to sift through them and  find links or patterns. Staqu’s answer to that problem was ABHED, short  for Artificial Intelligence Based Human Efface Detection, which formed  the base software for a mobile application and is connected to a backend  database processing system. “This system accumulates images, speech and  text, and using all this information, it develops intelligence for  these agencies,” says Rai.&lt;/p&gt;
&lt;p style="text-align: justify; "&gt;The company has also developed a real-time video surveillance-based  face recognition technology that works via a camera mounted on a smart  glass. The system was piloted with the Punjab Police and the company is  now in the process of deploying with &lt;a href="https://tech.economictimes.indiatimes.com/news/startups/ai-startup-staqu-signs-mou-to-assist-dubai-police/64271484" rel="noopener nofollow external noreferrer" target="_blank"&gt;the Dubai Police&lt;/a&gt;, says Rai.&lt;/p&gt;
&lt;p style="text-align: justify; "&gt;Most CCTVs today have a limited view and, in comparison, an officer  wearing the smart glass and moving in a crowd will have a better field  of view, says Rai. “In real time, the glass will stream the video  footage to the server, which will then match the footage and give the  report if any person from the database is detected,” he adds.&lt;/p&gt;
&lt;p style="text-align: justify; "&gt;The Staqu-developed PAIS, or Punjab Artificial Intelligence System,  can image match with an accuracy of 98% if the database has five images  of the person, claims Rai.&lt;/p&gt;
&lt;p style="text-align: justify; "&gt;Another use case for face recognition technology that has been coming  up in India is in the corporate sector for attendance and security.&lt;/p&gt;
&lt;p style="text-align: justify; "&gt;“In many of the enterprise use cases, the technology is used in  controlled spaces – for example, conferences where most attendees  pre-register or employees access systems in companies,” says Uday  Chinta, managing director of American technology service company IPSoft,  which has also developed and deployed an AI-based personal assistant  called Amelia in the US. “Amelia is able to recognise a person using his  facial features and able to assist them and give personalised service  based on their identity,” says Chinta.&lt;/p&gt;
&lt;p style="text-align: justify; "&gt;Software services company Tech Mahindra has launched a facial  recognition system for employee attendance at its Noida office.  According to &lt;a href="https://economictimes.indiatimes.com/news/company/corporate-trends/tech-mahindra-adopts-facial-recognition-to-mark-attendance/articleshow/65300255.cms" rel="noopener nofollow external noreferrer" target="_blank"&gt;one report&lt;/a&gt;,  the system also comes with a “moodometer” that will track the mood and  emotions of employees and give additional analytics to the company.&lt;/p&gt;
&lt;p style="text-align: justify; "&gt;Beyond face analytics, image recognition technology is also being  used to identify vehicles. The National Highways Authority of India has  been using AI-based image recognition systems to tag and identify  vehicles across its infrastructure in the country.&lt;/p&gt;
&lt;h2 style="text-align: justify; "&gt;Underlying digital layer: databases&lt;/h2&gt;
&lt;p style="text-align: justify; "&gt;The scarier part to the tech is its dark side: mass surveillance covering all. Countries like China have already deployed &lt;a href="https://www.theatlantic.com/international/archive/2018/02/china-surveillance/552203/" rel="noopener nofollow external noreferrer" target="_blank"&gt;mass surveillance on its citizens&lt;/a&gt;.  Chinese citizens today have a scoring system assigned to them by the  government based on various factors including data captured through the  surveillance program which will give the preferential access to services  like fast internet access.&lt;/p&gt;
&lt;p style="text-align: justify; "&gt;In the case of India, to facilitate proper surveillance in a state,  one of the first requirements is a digital database which already exists  in many forms across central and state governments. With or without a  double take, the answer is obvious: Aadhaar, India’s citizen ID  database. With a population of 135 crore and Aadhaar covering over 90%  of this population, it is India’s most extensive database.&lt;/p&gt;
&lt;p style="text-align: justify; "&gt;Notwithstanding the use cases detailed earlier in this story and the  huge interest among state police and law enforcement agencies in India,  collecting data and using it – even it is to bust crime – falls into  grey areas. In June this year, &lt;a href="https://indianexpress.com/article/india/ncrb-pitches-for-giving-police-limited-access-to-aadhaar-data-to-crack-crimes-5227541/" rel="noopener nofollow external noreferrer" target="_blank"&gt;news reports&lt;/a&gt; had National Crime Records Bureau director Ish Kumar saying that  investigators need to be given limited access to Aadhaar. Reacting to  this, the Unique Identification Authority of India (UIDAI) issued a &lt;a href="https://www.uidai.gov.in/images/news/Press-Note-on-rejecting-demand-of-access-to-Aadhaar-data-25062018.pdf" rel="noopener nofollow external noreferrer" target="_blank"&gt;statement&lt;/a&gt; saying that access to Aadhaar biometric data for criminal investigation  is not permissible under Section 29 of the Aadhaar Act, 2016 — which  perhaps explains why the Punjab Police declined requests for interviews  for this story.&lt;/p&gt;
&lt;p style="text-align: justify; "&gt;Longtime Aadhaar critic Sunil Abraham, executive director of  Bengaluru’s Centre for Internet and Society (CIS), calls Aadhaar “the  perfect tool for surveillance”.&lt;/p&gt;
&lt;p style="text-align: justify; "&gt;“The main database is the Aadhaar database. It’s got your iris and  biometrics information already and they have said that they will  strengthen the fingerprint authentication with facial recognition. So  now, they have the have the full surveillance infrastructure that they  need. The collection devices (CCTVs) are just there to collect the data  but the actual recognition engine is Aadhaar only,” says Abraham, who is  leaving CIS to join non-profit Mozilla Foundation as a vice president  in January.&lt;/p&gt;
&lt;p style="text-align: justify; "&gt;According to him, all three types of biometrics – fingerprint data,  iris information data, and facial data – can be used in a remote and  covert fashion and, therefore, in a non-consensual fashion. (&lt;i&gt;Editor’s note&lt;/i&gt;: There is no public incident, to date, that proves such a use.)&lt;/p&gt;
&lt;p style="text-align: justify; "&gt;Abraham is “100% sure” where we are headed. “The reason why I call  Aadhaar a surveillance project is not that there is metadata stored, I  call it a surveillance project because the biometrics are being stored.  Metadata is one of the problems, that is the profiling risk but the  surveillance risk primarily comes from the biometric data that they  have,” he says. By metadata, he is referring to a citizen’s information  such as phone number, age, sex, address, and other details.&lt;/p&gt;
&lt;p style="text-align: justify; "&gt;There are also other databases in the works that could provide the  basis for surveillance. Like: the Crime and Criminal Tracking Network  &amp;amp; Systems (CCTNS) across police stations in India. &lt;a href="http://ncrb.gov.in/BureauDivisions/cctnsnew/index.html" rel="noopener nofollow external noreferrer" target="_blank"&gt;According&lt;/a&gt; to the CCTNS website, as of May 2018, the CCTNS hardware and software  deployment has covered nearly 94% of the police stations across India.  There have been &lt;a href="https://thewire.in/government/hyderabad-smart-policing-surveillance" rel="noopener nofollow external noreferrer" target="_blank"&gt;reports&lt;/a&gt; of the CCTNS system being used as a mass surveillance system in the guise of e-policing by authorities in Hyderabad.&lt;/p&gt;
&lt;p style="text-align: justify; "&gt;Early in 2016, the Hyderabad of Police had launched a &lt;a href="http://www.hyderabadpolice.gov.in/assets/tender/Integrated%20Information%20Hub(IIH).pdf" rel="noopener nofollow external noreferrer" target="_blank"&gt;tender&lt;/a&gt; looking for companies to set up a citizen profiling and monitoring system. According to a report in &lt;i&gt;Telangana Today&lt;/i&gt;,  the Integrated People Information Hub (IPIH) gives the police access to  personal informations of its citizens including names, family details,  addresses and other related information by sourcing them from documents  like police records, FIRs and other external sources like utility  connections, tax payments, voter identification, passport etc.&lt;/p&gt;
&lt;p style="text-align: justify; "&gt;During Israeli Prime Minister Benjamin Netanyahu’s visit to India in January, Tel Aviv-based AI company Cortica had &lt;a href="https://www.prnewswire.com/news-releases/prime-ministers-narendra-modi-and-benjamin-netanyahu-welcome-new-age-of-collaboration-for-israel-and-india-300589299.html" rel="nofollow external noopener noreferrer"&gt;announced&lt;/a&gt; a partnership with India’s Best Group to develop solutions for combing  through data captured daily by drones, surveillance cameras, and  satellites. The aim is to develop an AI-based real-time identification  of patterns, concepts and situational anomalies to identify potential  problems, flag them and improve safety in the process. More details such  as scale and scope of this partnership are not available at this point  in time.&lt;/p&gt;
&lt;h2 style="text-align: justify; "&gt;Mass surveillance: Easier said than done&lt;/h2&gt;
&lt;p style="text-align: justify; "&gt;Take a step back. India already has multiple digital surveillance –  even if not mass, real-time facial recognition – programs in place to  keep track of its citizens. E.g.: the Telecom Enforcement Resource and  Monitoring (TERM) and NETRA (NEtwork TRaffic Analysis) surveillance  software developed by the Centre for Artificial Intelligence and  Robotics (CAIR). These are just some of the surveillance programs  operated by the government.&lt;/p&gt;
&lt;p style="text-align: justify; "&gt;But when it comes to mass surveillance in real time, even with the  AI-based tech is available today, the currently installed infrastructure  might not be ready for real-time mass surveillance. “Countries like  China are good at setting up infrastructure which is very essential for  mass surveillance systems to be in place,” says Kedar Kulkarni of  Bengaluru-based deep learning startup Hyperverge, who also insists that  all CCTVs out there today might not be fit to conduct facial  recognition.&lt;/p&gt;
&lt;p style="text-align: justify; "&gt;According to Kulkarni, for a mass surveillance system to be in place,  you either need cameras that can capture and do computing for face  recognition within its hardware or you need a robust network which can  transmit live feeds from multiple cameras to processing centres, which  is very bandwidth intensive.&lt;/p&gt;
&lt;p style="text-align: justify; "&gt;Most public spaces in India including railway stations, bus depots,  metro station, marketplaces are often under CCTV surveillance. New Delhi  is all set to have one of the largest deployments in the country of  CCTVs with the state government announcing plans to install 1.4 lakh  CCTVs across Delhi. The India Railways is also setting aside Rs 3,000  crore in its 2018-19 budget to install CCTV systems across 11,000 trains  and 8,500 stations, according to a news report.&lt;/p&gt;
&lt;p style="text-align: justify; "&gt;In comparison, China is said to have 170 million CCTV cameras  installed across the country currently and this number is estimated to  go up by 400 million in the next three years, says a BBC news report.&lt;/p&gt;
&lt;p style="text-align: justify; "&gt;Even the staunchest privacy activists acknowledge what surveillance  can deliver if used carefully. “Overall, it is a very powerful  technology. It should be used for law enforcement, it should be used for  national security. That is the correct domain of application,” says  Abraham. He hastens to add the caveats: “When we use it, we have to use  it with lots of safeguards and it should be used only on a very small  subset of the population. It shouldn’t be a technology that is broadly  deployed in the population because it is not necessary, it is not  proportionate, and the risks are very high.”&lt;/p&gt;
&lt;p style="text-align: justify; "&gt;The flip and funny side of facial recognition-based surveillance is  that the government does not need the technology to actually work. Just  the threat of surveillance – that big brother is watching you – is  enough to reduce crime. According to Gnanadesikan, the Chennai CEO of  FaceTagr, one reason for the drop in crime rate in last year’s T. Nagar  trials was that criminals knew that they were being watched.&lt;/p&gt;
        &lt;p&gt;
        For more details visit &lt;a href='https://cis-india.org/internet-governance/news/factor-daily-anand-murali-august-13-2018-the-big-eye'&gt;https://cis-india.org/internet-governance/news/factor-daily-anand-murali-august-13-2018-the-big-eye&lt;/a&gt;
        &lt;/p&gt;
    </description>
    <dc:publisher>No publisher</dc:publisher>
    <dc:creator>Admin</dc:creator>
    <dc:rights></dc:rights>

    
        <dc:subject>Internet Governance</dc:subject>
    

   <dc:date>2018-08-13T14:54:14Z</dc:date>
   <dc:type>News Item</dc:type>
   </item>


    <item rdf:about="https://cis-india.org/internet-governance/news/bloomberg-alex-mathew-october-20-2016-the-big-debit-card-breach">
    <title>The Big Debit Card Breach: Three Things Card Holders Need To Understand</title>
    <link>https://cis-india.org/internet-governance/news/bloomberg-alex-mathew-october-20-2016-the-big-debit-card-breach</link>
    <description>
        &lt;b&gt;A total of 32 lakh debit cards across 19 banks could have been compromised on account of a purported fraud, the National Payment Corporation of India said in a statement.&lt;/b&gt;
        &lt;p style="text-align: justify; "&gt;The article by Alex Mathew was &lt;a class="external-link" href="http://www.bloombergquint.com/business/2016/10/20/indias-biggest-security-breach-32-lakh-debit-cards-across-19-banks-may-have-been-compromised"&gt;published by Bloomberg&lt;/a&gt; on October 20, 2016. Udbhav Tiwari was quoted.&lt;/p&gt;
&lt;hr /&gt;
&lt;p style="text-align: justify; "&gt;The  issue was brought to light when State Bank of India blocked the debit  cards of 6 lakh customers on October 14. This was done after the bank  was alerted to a possible fraud by the National Payment Corporation of  India, MasterCard and Visa, said Managing Director Rajnish Kumar in a  telephonic interview with BloombergQuint.&lt;/p&gt;
&lt;p style="text-align: justify; "&gt;In a statement released  on Thursday evening, the NPCI clarified that the problem was brought to  their attention when they received complaints from a few banks that  customers’ cards were used fraudulently, mainly in China and the U.S.,  while those cardholders were in India.&lt;/p&gt;
&lt;p style="text-align: justify; "&gt;“The complaints of  fraudulent withdrawal are limited to cards of 19 banks and 641  customers. The total amount involved is Rs 1.3 crore as reported by  various affected banks to NPCI,” the payments corporation said.&lt;/p&gt;
&lt;div class="story__element__wrapper"&gt;
&lt;div class="story__element__image story__element"&gt;&lt;figure&gt; &lt;img src="https://cis-india.org/home-images/Card.png" alt="Card" class="image-inline" title="Card" /&gt;&lt;br /&gt; &lt;/figure&gt;&lt;/div&gt;
&lt;/div&gt;
&lt;div class="story__element__wrapper"&gt;
&lt;div class="story__element__text story__element"&gt;
&lt;div class="story-element-"&gt;
&lt;p style="text-align: justify; "&gt;SISA Security, a Bengaluru-based company is currently undertaking  a forensic study to identify the extent of the problem and will submit a  final report in November.&lt;/p&gt;
&lt;/div&gt;
&lt;/div&gt;
&lt;/div&gt;
&lt;div class="story__element__wrapper"&gt;
&lt;div class="story__element__text story__element"&gt;
&lt;div class="story-element-blockquote"&gt;
&lt;div&gt;
&lt;blockquote&gt;Based on the advisory issued by NPCI and other schemes, it is gathered  that banks have advised their customers to change their debit card PIN.  In situations where customers could not be contacted, the cards have  been blocked and fresh cards are being issued by member banks.&lt;/blockquote&gt;
&lt;span class="attribution"&gt;NPCI statement&lt;/span&gt;&lt;/div&gt;
&lt;/div&gt;
&lt;/div&gt;
&lt;/div&gt;
&lt;div class="story__element__wrapper"&gt;
&lt;div class="story__element__text story__element"&gt;
&lt;div class="story-element-"&gt;
&lt;p style="text-align: justify; "&gt;State Bank of India has blocked 6 lakh cards, while other banks  have sent notifications to customers advising them to change their  personal identification numbers.&lt;/p&gt;
&lt;h3&gt;How The Breach Could Have Occurred&lt;/h3&gt;
&lt;p&gt;The breach that has apparently given hackers access to the PIN codes  of several bank customers is likely to be on account of a malware  attack. This attack is believed to have originated at an ATM.&lt;/p&gt;
&lt;p style="text-align: justify; "&gt;The  actual modus operandi of the hackers will only become clear once the  forensic audit is released in November, but BloombergQuint spoke to  cyber security expert Udbhav Tiwari to find out how the attack could  have been orchestrated.&lt;/p&gt;
&lt;p style="text-align: justify; "&gt;First, the hacker would have had to gain  physical access to an ATM. The malware was then likely injected by  connecting a laptop or another special device to a port on the cash  disbursing machine, said Tiwari, a consultant at Centre For Internet  &amp;amp; Society in Bengaluru.&lt;/p&gt;
&lt;p style="text-align: justify; "&gt;Once the malware is injected, it  automatically spreads across the network and infects other devices that  are not protected against it. In this case, the malware could have  infected a payment switch provider’s network.&lt;/p&gt;
&lt;p style="text-align: justify; "&gt;A payment switch  provider is an entity that facilitates a transaction either from an ATM  or an online payment gateway. The service provider decides to whom the  request for authorisation will be sent and then transmits the request  back to the merchant or the ATM where the transaction originated.&lt;/p&gt;
&lt;p style="text-align: justify; "&gt;In  this case, one payment switch provider, Hitachi Payment Services, which  manages close to 50,000 ATMs across the country, was asked by banks to  investigate 30 of its ATMs on account of around 400 suspicious  transactions that took place outside India, Managing Director Loney  Antony told BloombergQuint in a telephonic interview.&lt;/p&gt;
&lt;p&gt;The company  had earlier said in a statement that an interim report by the audit  agency does not suggest any breach or compromise in its systems.&lt;/p&gt;
&lt;h3&gt;The Scale Of The Breach&lt;/h3&gt;
&lt;p style="text-align: justify; "&gt;According  to a study conducted by NPCI in collaboration with the banks, the  number of debit cards that were infected by the malware has been set at  32 lakh. But Tiwari said this number could be higher.&lt;/p&gt;
&lt;div class="story__element__wrapper"&gt;
&lt;div class="story__element__text story__element"&gt;
&lt;div&gt;
&lt;blockquote&gt;The hypothetical limit to how much the malware can spread is dependent  on the vulnerability of the systems, and if one of the payment switch  provider’s systems was vulnerable and they still haven’t decided how  many systems are vulnerable, it is quite possible that the malware is  spreading at this point.&lt;/blockquote&gt;
&lt;/div&gt;
&lt;p&gt;&lt;span class="attribution"&gt;Udbhav Tiwari, Consultant, Centre For Internet &amp;amp; Society&lt;/span&gt;&lt;/p&gt;
&lt;h3&gt;&lt;span class="attribution"&gt;What A Customer Should Do&lt;/span&gt;&lt;/h3&gt;
&lt;p&gt;The first, and most important step a customer should take is to immediately change their debit card PIN, Tiwari pointed out.&lt;/p&gt;
&lt;p style="text-align: justify; "&gt;State  Bank of India has said that its customers can opt to restrict the usage  of their debit cards, for example whether it can be used both  internationally and domestically or only domestically. Also, the daily  limit of the debit card can be changed.&lt;/p&gt;
&lt;p style="text-align: justify; "&gt;Once these steps have been  taken, according to Tiwari, it is most important that customers stay  vigilant and keep monitoring their bank statements. If an unauthorised  transaction takes place, a customer should immediately contact their  bank and block their card.&lt;/p&gt;
&lt;/div&gt;
&lt;/div&gt;
&lt;/div&gt;
&lt;/div&gt;
&lt;/div&gt;
        &lt;p&gt;
        For more details visit &lt;a href='https://cis-india.org/internet-governance/news/bloomberg-alex-mathew-october-20-2016-the-big-debit-card-breach'&gt;https://cis-india.org/internet-governance/news/bloomberg-alex-mathew-october-20-2016-the-big-debit-card-breach&lt;/a&gt;
        &lt;/p&gt;
    </description>
    <dc:publisher>No publisher</dc:publisher>
    <dc:creator>praskrishna</dc:creator>
    <dc:rights></dc:rights>

    
        <dc:subject>Cyber Security</dc:subject>
    
    
        <dc:subject>Internet Governance</dc:subject>
    

   <dc:date>2016-10-21T13:43:17Z</dc:date>
   <dc:type>News Item</dc:type>
   </item>


    <item rdf:about="https://cis-india.org/internet-governance/news/the-benefits-and-challenges-of-the-201cfree-flow201d-of-data">
    <title>The Benefits and Challenges of the “Free Flow” of Data</title>
    <link>https://cis-india.org/internet-governance/news/the-benefits-and-challenges-of-the-201cfree-flow201d-of-data</link>
    <description>
        &lt;b&gt;Internet Governance Forum (IGF) 2015 will be held at João Pessoa in Brazil from November 10 to 13, 2015. The theme of IGF 2015 is Evolution of Internet Governance: Empowering Sustainable Development. Sunil Abraham is a panelist in this workshop organized by Public Knowledge on November 12, 2015.&lt;/b&gt;
        &lt;p style="text-align: justify; "&gt;The Internet was designed so that global data flows would be dictated by efficiency, rather than centralized control or oversight. This engineering principle has provided businesses and consumers with access to the best available technology, information, and services, wherever those resources may be located around the world. It has benefitted virtually all industry sectors, from manufacturing to financial services, education, health care, and beyond. The “free flow” of data is what has allowed the Internet flourish into what it is today.&lt;br /&gt;&lt;br /&gt;Yet governments, corporations, and non-state actors around the world are increasingly employing a variety of technical, legal, and administrative tools to restrict data flows, limiting routing and data storage to particular jurisdictions and restricting the kinds of content and data types that are permitted online. Some of these restrictions have been put in place for legitimate purposes, designed to further privacy protections, network security, and fair commerce, and have been justified within the bounds of international law and norms. Others, however, are less defensible, and are intended to unfairly support preferred commercial interests or to quell domestic political dissent.&lt;br /&gt;&lt;br /&gt;This panel will discuss the many benefits and challenges of the free flow of data. It will foster a discussion of the ways in which stakeholders can address the underlying reasons for data flow restrictions (such as the need for law enforcement access to data or the desire to nurture local ICT industry development, etc.) without subverting the Internet’s core potential for innovation, economic growth, and public welfare.&lt;/p&gt;
&lt;p style="text-align: justify; "&gt;&lt;b&gt;Name, stakeholder group, and organizational affiliation of workshop proposal co-organizer(s)&lt;/b&gt;&lt;br /&gt;Carolina Rossini&lt;br /&gt;Civil Society&lt;br /&gt;Public Knowledge&lt;/p&gt;
&lt;p style="text-align: justify; "&gt;&lt;b&gt;Has the proposer, or any of the co-organizers, organized an IGF workshop before?&lt;/b&gt;&lt;br /&gt;&lt;br /&gt;no&lt;br /&gt;&lt;br /&gt;Subject matter #tags that describe the workshop&lt;br /&gt;&lt;br /&gt;#innovation #barriers #policy #cross-boarder flow #privacy&lt;br /&gt;&lt;br /&gt;&lt;b&gt;Description of the plan to facilitate discussion amongst speakers, audience members and remote participants&lt;br /&gt;&lt;/b&gt;&lt;br /&gt;Each panelist will be given approximately 3 minutes for opening remarks, followed by a moderated discussion, and then audience question and answer. Remote participants will be given the opportunity to ask questions over an online forum, such as Webx and Twitter.&lt;br /&gt;&lt;br /&gt;&lt;b&gt;Names and affiliations (stakeholder group, organization) of the participants in the proposed workshop&lt;/b&gt;&lt;/p&gt;
&lt;table class="grid listing"&gt;
&lt;tbody&gt;
&lt;tr class="even"&gt;
&lt;td&gt;
&lt;ul&gt;
&lt;li&gt; Name Carolina Rossini&lt;/li&gt;
&lt;li&gt;Stakeholder group: Civil Society&lt;/li&gt;
&lt;li&gt;Organization: Public Knowledge&lt;/li&gt;
&lt;li&gt;Describe why this speaker has been selected: She is a  world-renowned expert on Internet policy and law, a Brazilian national.&lt;/li&gt;
&lt;li&gt;Have you contacted the speaker? Yes&lt;/li&gt;
&lt;/ul&gt;
&lt;ul&gt;
&lt;li&gt;Name Vint Cerf&lt;/li&gt;
&lt;li&gt;Stakeholder group: Private Sector/Technical Community&lt;/li&gt;
&lt;li&gt;Organization: Google&lt;/li&gt;
&lt;li&gt;Describe why this speaker has been selected: He has been involved in Internet issues for many years and is currently serving in an influential vice president and “chief evangelist” role at Google.&lt;/li&gt;
&lt;li&gt;Have you contacted the speaker? Yes&lt;/li&gt;
&lt;/ul&gt;
&lt;p&gt; &lt;/p&gt;
&lt;ul&gt;
&lt;li&gt; Name Lawrence Strickling&lt;/li&gt;
&lt;li&gt;Stakeholder group: Government&lt;/li&gt;
&lt;li&gt;Organization: U.S. Department of Commerce, NTIA&lt;/li&gt;
&lt;li&gt;Describe why this speaker has been selected: He is the head of one  of the United States government’s principal Internet policy agencies.&lt;/li&gt;
&lt;li&gt;Have you contacted the speaker? Yes&lt;/li&gt;
&lt;li&gt;Name Richard Leaning&lt;/li&gt;
&lt;li&gt;Stakeholder group: Government&lt;/li&gt;
&lt;li&gt;Organization: European Cyber Crime Centre (EC3), Europol&lt;/li&gt;
&lt;li&gt;Describe why this speaker has been selected: He understands the  needs of the law enforcement community from a European perspective, a  British national.&lt;/li&gt;
&lt;li&gt;Have you contacted the speaker? Yes&lt;/li&gt;
&lt;/ul&gt;
&lt;p&gt; &lt;/p&gt;
&lt;ul&gt;
&lt;li&gt; Name Marietje Schaake&lt;/li&gt;
&lt;li&gt;Stakeholder group: Government&lt;/li&gt;
&lt;li&gt;Organization: European Parliament&lt;/li&gt;
&lt;li&gt;Describe why this speaker has been selected: She is a prominent  privacy advocate within the European Parliament, a Netherlands national.&lt;/li&gt;
&lt;li&gt;Have you contacted the speaker? Yes&lt;/li&gt;
&lt;/ul&gt;
&lt;p&gt; &lt;/p&gt;
&lt;ul&gt;
&lt;li&gt;Name Nasser Kettani&lt;/li&gt;
&lt;li&gt;Stakeholder group: Private Sector&lt;/li&gt;
&lt;li&gt;Organization: Microsoft&lt;/li&gt;
&lt;li&gt;Describe why this speaker has been selected: He helps build and  design data centers for Microsoft in Africa, a Moroccan national.&lt;/li&gt;
&lt;li&gt;Have you contacted the speaker? Yes&lt;/li&gt;
&lt;/ul&gt;
&lt;p&gt; &lt;/p&gt;
&lt;ul&gt;
&lt;li&gt; Name Sunil Abraham&lt;/li&gt;
&lt;li&gt;Stakeholder group: Civil Society&lt;/li&gt;
&lt;li&gt;Organization: Centre for Internet and Society, India&lt;/li&gt;
&lt;li&gt;Describe why this speaker has been selected: He is the executive  director of one of India’s most influential Internet policy think tanks  and advocacy groups.&lt;/li&gt;
&lt;li&gt;Have you contacted the speaker? No, but know him well.&lt;/li&gt;
&lt;/ul&gt;
&lt;p&gt; &lt;/p&gt;
&lt;ul&gt;
&lt;li&gt; Name Zahra Rose&lt;/li&gt;
&lt;li&gt;Stakeholder group: Civil Society&lt;/li&gt;
&lt;li&gt;Organization: Developing Countries' Centre for Cyber Crime Law&lt;/li&gt;
&lt;li&gt;Describe why this speaker has been selected: A lawyer, she  understands the needs of the law enforcement community from a civil  society perspective in Pakistan.&lt;/li&gt;
&lt;li&gt;Have you contacted the speaker? No&lt;/li&gt;
&lt;/ul&gt;
&lt;/td&gt;
&lt;/tr&gt;
&lt;tr&gt;
&lt;td&gt;
&lt;p class="title"&gt;&lt;b&gt;Name of in-person Moderator(s)&lt;/b&gt;&lt;/p&gt;
&lt;p&gt;Jonah Force Hill&lt;/p&gt;
&lt;/td&gt;
&lt;/tr&gt;
&lt;tr&gt;
&lt;td&gt;
&lt;p class="title"&gt;&lt;b&gt;Name of Remote Moderator(s)&lt;/b&gt;&lt;/p&gt;
&lt;p&gt;Winter Casey, U.S. Department of Commerce, NTIA&lt;/p&gt;
&lt;/td&gt;
&lt;/tr&gt;
&lt;tr&gt;
&lt;td&gt;
&lt;p class="title"&gt;&lt;b&gt;Name of Rapporteur(s)&lt;/b&gt;&lt;/p&gt;
&lt;p&gt;Seth Bouvier, U.S. Department of State&lt;/p&gt;
&lt;/td&gt;
&lt;/tr&gt;
&lt;tr&gt;
&lt;td&gt;
&lt;p class="title"&gt;Description of the proposer's plans for remote participation&lt;/p&gt;
&lt;p&gt;We intend to utilize the IGF’s WebX system to include remote  participants in the question and answer portion of the panel. The remote  participants will be afforded equal/proportional representation in the  discussion. The remote moderator will facilitate the Q&amp;amp;A with the  moderator. We’ll need a screen in the room to display the remote  comments.&lt;/p&gt;
&lt;/td&gt;
&lt;/tr&gt;
&lt;/tbody&gt;
&lt;/table&gt;
&lt;p&gt;For more info visit &lt;a class="external-link" href="https://www.intgovforum.org/cms/wks2015/index.php/proposal/view_public/65"&gt;IGF website&lt;/a&gt;.&lt;/p&gt;
        &lt;p&gt;
        For more details visit &lt;a href='https://cis-india.org/internet-governance/news/the-benefits-and-challenges-of-the-201cfree-flow201d-of-data'&gt;https://cis-india.org/internet-governance/news/the-benefits-and-challenges-of-the-201cfree-flow201d-of-data&lt;/a&gt;
        &lt;/p&gt;
    </description>
    <dc:publisher>No publisher</dc:publisher>
    <dc:creator>praskrishna</dc:creator>
    <dc:rights></dc:rights>

    
        <dc:subject>Internet Governance Forum</dc:subject>
    
    
        <dc:subject>Internet Governance</dc:subject>
    

   <dc:date>2015-11-08T02:09:40Z</dc:date>
   <dc:type>News Item</dc:type>
   </item>


    <item rdf:about="https://cis-india.org/news/sant-ox-ac-uk-may-31-2013-bapsybanoo-marchioness-winchester-lectures">
    <title>The Bapsybanoo Marchioness of Winchester Lectures</title>
    <link>https://cis-india.org/news/sant-ox-ac-uk-may-31-2013-bapsybanoo-marchioness-winchester-lectures</link>
    <description>
        &lt;b&gt;Chinmayi Arun was a speaker at the Bapsybanoo Marchioness of Winchester Lectures on 'India's Politics of Free Expression' in the University of Oxford on May 31 2013, in the session on 'media and security'. &lt;/b&gt;
        &lt;p style="text-align: justify; "&gt;The Asian Studies Centre, Free Speech Debate, the Oxford India Society and Ideas for India Oxbridge Exchange were the co-sponsors for this event.&lt;/p&gt;
&lt;p style="text-align: justify; "&gt;Theme: India's Politics of Free Expression&lt;br /&gt;Date: May 31, 2013&lt;br /&gt;Time: 10.00 a.m. to 6.15 p.m.&lt;br /&gt;Venue: Nissan Lecture Theatre, St. Antony's College, Oxford&lt;/p&gt;
&lt;hr /&gt;
&lt;p&gt;More details can be found &lt;a class="external-link" href="http://www.sant.ox.ac.uk/asian/indlects.pdf"&gt;here&lt;/a&gt;.&lt;/p&gt;
        &lt;p&gt;
        For more details visit &lt;a href='https://cis-india.org/news/sant-ox-ac-uk-may-31-2013-bapsybanoo-marchioness-winchester-lectures'&gt;https://cis-india.org/news/sant-ox-ac-uk-may-31-2013-bapsybanoo-marchioness-winchester-lectures&lt;/a&gt;
        &lt;/p&gt;
    </description>
    <dc:publisher>No publisher</dc:publisher>
    <dc:creator>praskrishna</dc:creator>
    <dc:rights></dc:rights>

    
        <dc:subject>Freedom of Speech and Expression</dc:subject>
    
    
        <dc:subject>Internet Governance</dc:subject>
    

   <dc:date>2013-06-09T03:35:38Z</dc:date>
   <dc:type>News Item</dc:type>
   </item>


    <item rdf:about="https://cis-india.org/internet-governance/awesom-contracts-project">
    <title>The Awesome Contracts Project (Geekup @ CIS)</title>
    <link>https://cis-india.org/internet-governance/awesom-contracts-project</link>
    <description>
        &lt;b&gt;Vivek Durai, co-founder at Awesome Contracts, a Singapore-India startup will give a public lecture on May 18, 2012 at the Centre for Internet &amp; Society in Bangalore. Lawyer, musician, legal recruiter and entrepreneur, Amith Narayan will also participate through Skype!&lt;/b&gt;
        
&lt;h2&gt;The Awesome Contracts Project&lt;/h2&gt;
&lt;p&gt;Contracts are ubiquitous in our everyday life. They are also a nuisance. And they typically come attached with a bigger nuisance - lawyers! Interestingly though, contracts are a lot like code. Geek-lawyers, a very small, minuscule tribe on this planet, tend to notice a lot of similarities between the two. If this is true, it opens up a lot of possibilities, including changing the way we do business and in particular generate contracts, negotiate and seal deals.&lt;/p&gt;
&lt;p&gt;&amp;nbsp;&lt;/p&gt;
&lt;p&gt;We'll talk about some of the technology and some of the products we're working on that we think can provide power to a lot of folks.&lt;/p&gt;
&lt;p&gt;&amp;nbsp;&lt;/p&gt;
&lt;h2&gt;Agenda&lt;/h2&gt;
&lt;table class="plain"&gt;
&lt;tbody&gt;
&lt;tr&gt;
&lt;td&gt;17:00 - 17:05&lt;/td&gt;
&lt;td&gt;Welcome with Tea, Coffee, and Snacks&lt;/td&gt;
&lt;/tr&gt;
&lt;tr&gt;
&lt;td&gt;17:05 - 17:15&lt;br /&gt;&lt;br /&gt;&lt;/td&gt;
&lt;td&gt;Lightning Talks&lt;br /&gt;&lt;br /&gt;&lt;/td&gt;
&lt;/tr&gt;
&lt;tr&gt;
&lt;td&gt;17:15 - 18:00&lt;br /&gt;&lt;br /&gt;&lt;/td&gt;
&lt;td&gt;The Awesome Contracts Project&lt;/td&gt;
&lt;/tr&gt;
&lt;tr&gt;
&lt;td&gt;
&lt;div class="time"&gt;18:00 - 18:30&lt;/div&gt;
&lt;/td&gt;
&lt;td&gt;Q &amp;amp; A&lt;/td&gt;
&lt;/tr&gt;
&lt;/tbody&gt;
&lt;/table&gt;
&lt;h2&gt;Vivek Durai&lt;/h2&gt;
&lt;p&gt;Vivek Durai is a co-founder at Awesome Contracts, a Singapore-India startup that is working on interesting problems in the field of law and contracts. As with just about everyone else, he and co-founder Amith Narayan would like to change the world. Preferably, for the better. Vivek and Amith are both alumni of the National Law School of India University.&lt;br /&gt;&lt;br /&gt;Vivek is a lawyer by training, a geek by nature, and generally human. Most of the time. As far as ideologies go, Vivek is a Pythonista currently flirting with Node and other things. He is also incidentally a Partner at Atman Law Partners, a young three office boutique law firm.&lt;/p&gt;
&lt;h2&gt;Amith Narayan&lt;/h2&gt;
&lt;p&gt;Amith Narayan loves hats. He likes them so much in fact, he's been wearing all kinds. He has been a corporate lawyer, a musician, a record producer, a legal recruiter, and now an entrepreneur running this crazy little startup. Amith grew up in Calicut (Kozhikode) Kerala, trained in law at NLSIU, worked in the grand dame of the Indian legal world - Crawford Bayley - before moving to Singapore where he's been living for the past 10 years. Amith will join us over Skype during the talk.&lt;br /&gt;&lt;br /&gt;&lt;/p&gt;

        &lt;p&gt;
        For more details visit &lt;a href='https://cis-india.org/internet-governance/awesom-contracts-project'&gt;https://cis-india.org/internet-governance/awesom-contracts-project&lt;/a&gt;
        &lt;/p&gt;
    </description>
    <dc:publisher>No publisher</dc:publisher>
    <dc:creator>praskrishna</dc:creator>
    <dc:rights></dc:rights>

    
        <dc:subject>Lecture</dc:subject>
    
    
        <dc:subject>Event Type</dc:subject>
    
    
        <dc:subject>Internet Governance</dc:subject>
    

   <dc:date>2012-05-11T12:17:09Z</dc:date>
   <dc:type>Event</dc:type>
   </item>


    <item rdf:about="https://cis-india.org/internet-governance/blog/the-audacious-right-to-be-forgotten">
    <title>The Audacious ‘Right to Be Forgotten’</title>
    <link>https://cis-india.org/internet-governance/blog/the-audacious-right-to-be-forgotten</link>
    <description>
        &lt;b&gt;There has long been speculation over the permanency of our online presence. Posting about excessively-personal details, commenting in a way which is later embarrassing, being caught in unflattering public photos; to our chagrin, all of these unfortunate situations often persist on the web, and can continue to haunt us in future years.&lt;/b&gt;
        &lt;p style="text-align: justify; "&gt;Perhaps less dire, what if someone decides that she no longer wants the history of her internet action stored in online systems?&lt;/p&gt;
&lt;p style="text-align: justify; "&gt;So far, there has been confusion over what should be done, and what realistically &lt;i&gt;can&lt;/i&gt; be done about this type of permanent presence on a platform as complex and international in scope as the internet. But now, the idea of a right to be forgotten may be able to define the rights and responsibilities in dealing with unwanted data.&lt;/p&gt;
&lt;p style="text-align: justify; "&gt;The right to be forgotten is an interesting and highly contentious concept currently being debated in the new European Union Data Protection Regulations.&lt;a href="#fn1" name="fr1"&gt;[1]&lt;/a&gt;&lt;/p&gt;
&lt;p style="text-align: justify; "&gt;The Data Protection Regulation Bill was proposed in 2012 by EU Commissioner Viviane Reding and stands to replace the EU’s previous Data Protection law, which was enacted in 1995. Referred to as the “right to be forgotten” (RTBF), article 17 of the proposal would essentially allow an EU citizen to demand service providers to “take all reasonable steps” to remove his or her personal data from the internet, as long as there is no “legitimate” reason for the provider to retain it.&lt;a href="#fn1" name="fr1"&gt;[1]&lt;/a&gt; Despite the evident emphasis on personal privacy, the proposition is surrounded by controversy and facing resistance from many parties. Apparently, there are a range of concerns over the ramifications RTBF could bring.&lt;/p&gt;
&lt;p style="text-align: justify; "&gt;Not only are major IT companies staunchly opposed to the daunting task of being responsible for the erasure of data floating around the web, but governments like the United States and even Great Britain are objecting the proposal as well.&lt;a href="#fn2" name="fr2"&gt;[2]&lt;/a&gt;,&lt;a href="#fn3" name="fr3"&gt;[3]&lt;/a&gt;&lt;/p&gt;
&lt;p style="text-align: justify; "&gt;From a commercial aspect, IT companies and US lobbying forces view the concept of RTBF as a burden and a waste of resources for service providers to implement. Largely due to the RTBF clause, the new EU Data Protection proposal as a whole has witnessed intense, “unprecedented” lobbying by the largest US tech companies and US lobby groups&lt;a href="#fn4" name="fr4"&gt;[4]&lt;/a&gt;,&lt;a href="#fn5" name="fr5"&gt;[5]&lt;/a&gt;. From a different angle, there are those like Great Britain, whose grievances with the RTBF are in its overzealous aim and insatiable demands.&lt;a href="#fn2" name="fr2"&gt;[2]&lt;/a&gt; There are doubts as to whether a company will even be able to track down and erase all forms of  the data in question. The British Ministry of Justice stated, "The UK does not support the right to be forgotten as proposed by the European commission. The title raises unrealistic and unfair expectations of the proposals."&lt;a href="#fn2" name="fr2"&gt;[2]&lt;/a&gt; Many experts share these feasibility concerns. The Council of European Professional Informatics Societies (CEPIS) wrote a short report on the ramifications of cloud computing practices in 2011, in which it conformed, “It is impossible to guarantee complete deletion of all copies of data. Therefore it is difficult to enforce mandatory deletion of data. Mandatory deletion of data should be included into any forthcoming regulation of Cloud Computing services, but still it should not be relied on too much: the age of a ‘Guaranteed complete deletion of data’, if it ever existed has passed."&lt;a href="#fn6" name="fr6"&gt;[6]&lt;/a&gt;&lt;/p&gt;
&lt;p style="text-align: justify; "&gt;Feasibility aside, the most compelling issue in the debate over RTBF is the demanding challenge of balancing and prioritizing parallel rights. When it comes to forced data erasure, conflicts of right to be forgotten versus freedom of speech and expression easily arises. Which right takes precedence over the other?&lt;/p&gt;
&lt;p style="text-align: justify; "&gt;Some RTBF opponents fear that RTBF will hinder freedom of speech. They have a valid point. What is the extent of personal data erasure? Abuse of RTBF could result in some strange, Orwellian cyberspace where the mistakes or blemishes of society are all erased or constantly amended, and only positivity fills the internet. There are reasonable fears that a chilling effect may come into play once providers face the hefty noncompliance fines of the Data Protection law, and begin to automatically opt for customer privacy over considerations for freedom of expression. Moreover, what safeguards may be in place to prevent politicians or other public figures from removing bits of unwanted coverage?&lt;/p&gt;
&lt;p&gt;Although these examples are extreme, considerations like these need to be made in the development of this law. With the amount of backlash from various entities, it is clear that a concept like the right to be forgotten could not exist as a simple, generalized law. It needs refinement.&lt;/p&gt;
&lt;p style="text-align: justify; "&gt;Still, the concept of a RTBF is not without its supporters. Viktor Mayer-Schönberger, professor of Internet Governance at Oxford Internet Institute, considers RTBF implementation feasible and necessary, saying that even if it is difficult to remove all traces of an item, "it might be in Google's back-up, but if 99% of the population don't have access to it you have effectively been deleted."&lt;a href="#fn7" name="fr7"&gt;[7]&lt;/a&gt; Additionally, he claims that the undermining of freedom of speech and expression is "a ridiculous misstatement."&lt;a href="#fn7" name="fr7"&gt;[7]&lt;/a&gt; To him, the right to be forgotten is tied intricately to the important and natural process of forgetting things of the past.&lt;/p&gt;
&lt;p style="text-align: justify; "&gt;Moreover, the Data Protection Regulation does mention certain exceptions for the RTBF, including protection for "journalistic purposes or the purpose of artistic or literary expression." &lt;a href="#fn1" name="fr1"&gt;[1]&lt;/a&gt; The problem, however, is the seeming contradiction between the RTBF and its own exceptions. In practice, it will be difficult to reconcile the powers granted by the RTBF with the limitations claimed in other sections of the Data Protection Regulation.&lt;/p&gt;
&lt;p style="text-align: justify; "&gt;Currently, the are a few clean and straight forward implementations of RTBF. One would be the removal of mined user data which has been accumulated by service providers. Here, invoking the right would be possible once a person has deleted accounts or canceled contracts with a service (thereby fulfilling the notion that the service no longer has "legitimate" reason to retain the data). Another may be in the case of personal data given by minors who later want their data removed, which is an important example mentioned in Reding’s original proposal.&lt;a href="#fn4" name="fr4"&gt;[4]&lt;/a&gt; These narrow cases are some of the only instances where RTBF may be used without fear of interference with other social rights. Broader implementations of the RTBF concept, under the current unrefined form, may cause too many conflicting areas with other freedoms, and especially freedom of expression.&lt;/p&gt;
&lt;p style="text-align: justify; "&gt;Overall, the Right to Be Forgotten is a noble concept, born out of concern for the citizen being overpowered by the internet. As an early EU publication states, "The [RTBF] rules are about empowering people, not about erasing past events or restricting the freedom of the press."&lt;a href="#fn8" name="fr8"&gt;[8]&lt;/a&gt; But at this point, too many clear details seem to be lacking from the draft design of the RTBF. There is concern that without proper deliberation, the concept could lead to unforeseen and undesirable outcomes. Privacy is a fundamental right that deserves to be protected, but policy makers cannot blindly follow the ideals of one right to the point where it interferes with other aspects of society.&lt;/p&gt;
&lt;p style="text-align: justify; "&gt;Fortunately, recent amendment proposals have attempted some refinement of the bill. Jeffrey Rosen writes in the Stanford Law Review about a certain key concept that could help legitimize the right, namely an amendment proposing that only personally contributed data may be rescinded.&lt;a href="#fn9" name="fr9"&gt;[9]&lt;/a&gt; This would help avoid interference with others’ rights to expression, and provide limitations on the extent of right to be forgotten claims. As Leslie Harris, president of the Center for Democracy and Technology wrote in the Huffington Post, amendments are needed which can specifically define personal data in the RTBF sense; thereby distinguishing which type of data is allowed to be removed.&lt;a href="#fn10" name="fr10"&gt;[10]&lt;/a&gt; In the upcoming months, the European Parliament will be considering such amendments to the proposal. This time will be crucial as it will determine if the development of the right to be forgotten will make it a viable option for the EU’s 500 million citizens.&lt;/p&gt;
&lt;p style="text-align: justify; "&gt;But even after terms are defined and after safeguards are established, this underling philosophical question remains:&lt;/p&gt;
&lt;p style="text-align: justify; "&gt;&lt;b&gt;Should a person be able to reclaim the right to privacy after willingly giving it up in the first place? &lt;/b&gt;&lt;/p&gt;
&lt;p style="text-align: justify; "&gt;The RTBF is obviously a contentious topic, one which may need to be gauged individually by nation states; it will soon be revealed if the EU becomes the first to adopt the right. If RTBF fails to pass in European parliament, I would hope that it at least serves to remind people of the permanence of the data which they add to the internet, further incentivizing careful consideration of what one yields to the web. Rights frequently evolve and expand to meet societal or technological advances. If we are to expand the concept of privacy, however, then we must do so with proper consideration, so that privacy may not gain disproportionate power over other rights, or vice versa.&lt;/p&gt;
&lt;hr /&gt;
&lt;p&gt;[&lt;a href="#fr1" name="fn1"&gt;1&lt;/a&gt;]. &lt;a class="external-link" href="http://bit.ly/WSZvHv"&gt;http://bit.ly/WSZvHv&lt;/a&gt;&lt;/p&gt;
&lt;p&gt;[&lt;a href="#fr2" name="fn2"&gt;2&lt;/a&gt;]. &lt;a class="external-link" href="http://bit.ly/YxKaNJ"&gt;http://bit.ly/YxKaNJ&lt;/a&gt;&lt;/p&gt;
&lt;p&gt;[&lt;a href="#fr3" name="fn3"&gt;3&lt;/a&gt;]. &lt;a class="external-link" href="http://tcrn.ch/YdH82f"&gt;http://tcrn.ch/YdH82f&lt;/a&gt;&lt;/p&gt;
&lt;p&gt;[&lt;a href="#fr4" name="fn4"&gt;4&lt;/a&gt;]. &lt;a class="external-link" href="http://bit.ly/196E8qj"&gt;http://bit.ly/196E8qj&lt;/a&gt;&lt;/p&gt;
&lt;p&gt;[&lt;a href="#fr5" name="fn5"&gt;5&lt;/a&gt;]. &lt;a class="external-link" href="http://bit.ly/wJKWTZ"&gt;http://bit.ly/wJKWTZ&lt;/a&gt;&lt;/p&gt;
&lt;p&gt;[&lt;a href="#fr6" name="fn6"&gt;6&lt;/a&gt;]. &lt;a class="external-link" href="http://bit.ly/15aoknF"&gt;http://bit.ly/15aoknF&lt;/a&gt;&lt;/p&gt;
&lt;p&gt;[&lt;a href="#fr7" name="fn7"&gt;7&lt;/a&gt;]. &lt;a class="external-link" href="http://bit.ly/Z3JbRU"&gt;http://bit.ly/Z3JbRU&lt;/a&gt;&lt;/p&gt;
&lt;p&gt;[&lt;a href="#fr8" name="fn8"&gt;8&lt;/a&gt;]. &lt;a class="external-link" href="http://bit.ly/xfodhI"&gt;http://bit.ly/xfodhI&lt;/a&gt;&lt;/p&gt;
&lt;p&gt;[&lt;a href="#fr9" name="fn9"&gt;9&lt;/a&gt;]. &lt;a class="external-link" href="http://bit.ly/13uyda5"&gt;http://bit.ly/13uyda5&lt;/a&gt;&lt;/p&gt;
&lt;p&gt;[&lt;a href="#fr10" name="fn10"&gt;10&lt;/a&gt;]. &lt;a class="external-link" href="http://huff.to/16P2XIS"&gt;http://huff.to/16P2XIS&lt;/a&gt;&lt;/p&gt;
        &lt;p&gt;
        For more details visit &lt;a href='https://cis-india.org/internet-governance/blog/the-audacious-right-to-be-forgotten'&gt;https://cis-india.org/internet-governance/blog/the-audacious-right-to-be-forgotten&lt;/a&gt;
        &lt;/p&gt;
    </description>
    <dc:publisher>No publisher</dc:publisher>
    <dc:creator>kovey</dc:creator>
    <dc:rights></dc:rights>

    
        <dc:subject>Internet Governance</dc:subject>
    
    
        <dc:subject>Privacy</dc:subject>
    

   <dc:date>2013-07-31T10:08:55Z</dc:date>
   <dc:type>Blog Entry</dc:type>
   </item>


    <item rdf:about="https://cis-india.org/internet-governance/iacs-summer-school-2012">
    <title>The Asian Edge: 2012 Inter-Asia Cultural Studies Society Summer School </title>
    <link>https://cis-india.org/internet-governance/iacs-summer-school-2012</link>
    <description>
        &lt;b&gt;The 2nd Biannual Inter Asia Cultural Studies (IACS) Summer School will be hosted in Bangalore, India by the Centre for the Study of Culture and Society (CSCS) and the Centre for Internet and Society (CIS) with the Inter Asia Cultural Studies Consortium. The event will be held in the first and second week of August 2012.&lt;/b&gt;
        &lt;p style="text-align: justify; "&gt;The  IACS Summer School brings together South and East Asian experts from  different disciplines as faculty for graduate and advanced research  students to engage with key issues of larger social, cultural and  political concerns in Cultural Studies in Asia. Any student registered  in a post-graduate degree program is eligible for the IACS Summer  School. There are limited seats and students will be selected based on  their applications. Students registered at universities participating in  the Consortium of Inter Asia Cultural Studies Institutions will be  given first preference.&lt;/p&gt;
&lt;p style="text-align: justify; "&gt;The  Summer School 2012 proposes to integrate the teaching with core IACS  faculty with the larger realities of change in South and East Asia. It  proposes a 10 + 4 day structure.&lt;/p&gt;
&lt;p style="text-align: justify; "&gt;&lt;b&gt;Core&lt;/b&gt;&lt;b&gt; &lt;/b&gt;&lt;b&gt;Course: Methodologies for Cultural Studies in Asia:&lt;/b&gt;&lt;b&gt; &lt;/b&gt;The  Summer School offers a 10 day core course that works through seminars,  taught classrooms, tutorials, open spaces, field trips and workshops.  The core course shall address questions of Cultural Identity, Modernity,  Nationalism, Gender, Class, Revolution and Asianism to frame an  argument about relocating methods, concepts and ideas in contemporary  Cultural Studies in Asia.&lt;/p&gt;
&lt;table class="listing grid"&gt;
&lt;tbody&gt;
&lt;tr&gt;
&lt;th&gt;Day/Date&lt;/th&gt;&lt;th&gt;Time&lt;/th&gt;&lt;th&gt;Session&lt;/th&gt;&lt;th&gt;Instructors&lt;/th&gt;&lt;th&gt;Readings&lt;/th&gt;
&lt;/tr&gt;
&lt;tr&gt;
&lt;td&gt;Aug 02&lt;br /&gt;(Thu)&lt;br /&gt;&lt;br /&gt;&lt;/td&gt;
&lt;td&gt;09:30-10:00&lt;/td&gt;
&lt;td&gt;Introduction to Course/Orientation&lt;/td&gt;
&lt;td&gt;&lt;br /&gt;&lt;/td&gt;
&lt;td&gt;&lt;br /&gt;&lt;/td&gt;
&lt;/tr&gt;
&lt;tr&gt;
&lt;td&gt;&lt;br /&gt;&lt;/td&gt;
&lt;td&gt;10:00-1:00&lt;/td&gt;
&lt;td&gt;Session 1: The Question of Knowledge&lt;br /&gt;&lt;br /&gt;&lt;/td&gt;
&lt;td&gt;Instructors: Daniel PS Goh Nithin Manayath&lt;br /&gt;&lt;br /&gt;&lt;/td&gt;
&lt;td&gt;The Epistemological Value of East Asian Perspective – Sun Ge&lt;br /&gt;&lt;br /&gt;Knowledge Production in the Era of Neo-Liberal Globalisation – Kuan-Hsing Chen&lt;br /&gt;&lt;br /&gt;Teaching versus Research? – Meaghan Morris&lt;br /&gt;&lt;br /&gt;&lt;/td&gt;
&lt;/tr&gt;
&lt;tr&gt;
&lt;td&gt;&lt;br /&gt;&lt;/td&gt;
&lt;td&gt;3:00-5:00 &lt;br /&gt;&lt;/td&gt;
&lt;td&gt;Student Presentations&lt;/td&gt;
&lt;td&gt;Choi, Ji Yeon&lt;br /&gt;Ajinkya Shenava&lt;br /&gt;Khetrimayum M Singh&lt;br /&gt;Vincent Chung&lt;br /&gt;Jaime Fang-Tze Hsu&lt;br /&gt;&lt;br /&gt;&lt;/td&gt;
&lt;td&gt;&lt;br /&gt;&lt;/td&gt;
&lt;/tr&gt;
&lt;tr&gt;
&lt;td&gt;Aug 03&lt;br /&gt;(Fri)&lt;br /&gt;&lt;br /&gt;&lt;/td&gt;
&lt;td&gt;&lt;br /&gt;&lt;/td&gt;
&lt;td&gt;Culture Industries workshop&lt;/td&gt;
&lt;td&gt;&lt;br /&gt;&lt;/td&gt;
&lt;td&gt;&lt;br /&gt;&lt;/td&gt;
&lt;/tr&gt;
&lt;tr&gt;
&lt;td&gt;&lt;br /&gt;&lt;/td&gt;
&lt;td&gt;&lt;br /&gt;&lt;/td&gt;
&lt;td&gt;Workshop party&lt;/td&gt;
&lt;td&gt;&lt;br /&gt;&lt;/td&gt;
&lt;td&gt;&lt;br /&gt;&lt;/td&gt;
&lt;/tr&gt;
&lt;tr&gt;
&lt;td&gt;Aug 04&lt;br /&gt;(Sat)&lt;br /&gt;&lt;br /&gt;&lt;/td&gt;
&lt;td&gt;10:00-1:00 &lt;br /&gt;&lt;/td&gt;
&lt;td&gt;Session 2: The Question of Culture&lt;br /&gt;&lt;br /&gt;&lt;/td&gt;
&lt;td&gt;Instructors: &lt;br /&gt;Asha Achuthan&lt;br /&gt;Ratheesh Radhakrishnan&lt;br /&gt;&lt;br /&gt;&lt;/td&gt;
&lt;td&gt;Hind Swaraj – Chs IV, VI, XII, XIII – MK Gandhi&lt;br /&gt;&lt;br /&gt;Value Typology of Chinese Peasants and Its Transformation in Contemporary China – He Xuefeng&lt;br /&gt;&lt;br /&gt;An Elaborative Argumentation of a Nong-Country – Zhang Shi Zhao&lt;br /&gt;&lt;/td&gt;
&lt;/tr&gt;
&lt;tr&gt;
&lt;td&gt;&lt;br /&gt;&lt;/td&gt;
&lt;td&gt;3:00-5:00 &lt;br /&gt;&lt;/td&gt;
&lt;td&gt;Student Presentations&lt;/td&gt;
&lt;td&gt;Annisa Beta&lt;br /&gt;Ying-Tzu, Liu (Eva)&lt;br /&gt;Li, Yen-Chieh&lt;br /&gt;Sharib Aqleem Ali&lt;br /&gt;Li, Cho Kiu (Joseph)&lt;br /&gt;&lt;/td&gt;
&lt;td&gt;Venue: 1 Shanti Road&lt;/td&gt;
&lt;/tr&gt;
&lt;tr&gt;
&lt;td&gt;&lt;br /&gt;&lt;/td&gt;
&lt;td&gt;6:00-8:00&lt;/td&gt;
&lt;td&gt;EVENING SALON&lt;/td&gt;
&lt;td&gt;Tejaswini Niranjana and Kuan-Hsing Chen&lt;/td&gt;
&lt;td&gt;Venue: 1 Shanti Road&lt;/td&gt;
&lt;/tr&gt;
&lt;tr&gt;
&lt;td&gt;Aug 05&lt;br /&gt;(Sun)&lt;br /&gt;&lt;/td&gt;
&lt;td&gt;&lt;br /&gt;&lt;/td&gt;
&lt;td&gt;HOLIDAY&lt;/td&gt;
&lt;td&gt;&lt;br /&gt;&lt;/td&gt;
&lt;td&gt;&lt;br /&gt;&lt;/td&gt;
&lt;/tr&gt;
&lt;tr&gt;
&lt;td&gt;Aug 06&lt;br /&gt;(Mon)&lt;br /&gt;&lt;br /&gt;&lt;/td&gt;
&lt;td&gt;10:00-1:00&lt;/td&gt;
&lt;td&gt;Session 3: Nationalism and Modernity&lt;br /&gt;&lt;br /&gt;&lt;/td&gt;
&lt;td&gt;
&lt;p&gt;Instructor: Milind Wakankar&lt;/p&gt;
&lt;p&gt;Student Presentations: &lt;br /&gt;Mai Thi Thu&lt;br /&gt;Baidurya Chakrabarti&lt;br /&gt;Zhang, Bing&lt;br /&gt;Musab Iqbal&lt;br /&gt;Meng Hsien Lu&lt;/p&gt;
&lt;/td&gt;
&lt;td&gt;
&lt;p&gt;On Nation – Zhang Tai Yan&lt;br /&gt;&lt;br /&gt;Nationalism in India – &lt;br /&gt;Rabindranath Tagore&lt;br /&gt;&lt;br /&gt;New Dominant Ideology and Changes of Urban Space in Today's Shanghai – Wang Xiaoming&lt;br /&gt;&lt;br /&gt;The Twilight of Certitudes: Secularism, Hindu Nationalism and Other Masks of Deculturation – Ashis Nandy&lt;/p&gt;
&lt;p&gt;A National Culture for Pakistan: the political economy of a debate – Saadia Toor&lt;/p&gt;
&lt;/td&gt;
&lt;/tr&gt;
&lt;tr&gt;
&lt;td&gt;&lt;br /&gt;&lt;/td&gt;
&lt;td&gt;3:00-5:00&lt;/td&gt;
&lt;td&gt;&lt;br /&gt;&lt;/td&gt;
&lt;td&gt;Instructor: Madhuja M&lt;br /&gt;Student Presentations:&lt;br /&gt;Pan Yifan&lt;br /&gt;Zhang Zhihui&lt;br /&gt;Se Young Oh&lt;/td&gt;
&lt;td&gt;&lt;br /&gt;&lt;/td&gt;
&lt;/tr&gt;
&lt;tr&gt;
&lt;td&gt;&lt;br /&gt;&lt;/td&gt;
&lt;td&gt;&lt;br /&gt;&lt;/td&gt;
&lt;td&gt;EVENING SALON&lt;/td&gt;
&lt;td&gt;Stephen Chah and Ashish Rajadhyaksha&lt;/td&gt;
&lt;td&gt;Venue: Centre for Internet and Society&lt;/td&gt;
&lt;/tr&gt;
&lt;tr&gt;
&lt;td&gt;Aug 07&lt;br /&gt;(Tue)&lt;br /&gt;&lt;/td&gt;
&lt;td&gt;10:00-1:00&lt;/td&gt;
&lt;td&gt;Session 4: Culture and Economy&lt;/td&gt;
&lt;td&gt;Instructors: &lt;br /&gt;Radhika P&lt;br /&gt;Raghu Tenkayala&lt;br /&gt;&lt;/td&gt;
&lt;td&gt;
&lt;p&gt;In the Margin of the Capital: From ‘Tjerita Boedjang Bingoeng’ to ‘Si Doel anak sekolahan’&lt;/p&gt;
&lt;p&gt;The Emergent Culture of Consumption – Chua Beng Huat&lt;/p&gt;
&lt;p&gt;‘Bollywood’ 2004; When Was Bollywood – Ashish Rajadhyaksha&lt;/p&gt;
&lt;p&gt;Peasant Cultures of the 21st Century – Partha Chatterjee&lt;/p&gt;
&lt;/td&gt;
&lt;/tr&gt;
&lt;tr&gt;
&lt;td&gt;&lt;br /&gt;&lt;/td&gt;
&lt;td&gt;3:00-5:00&lt;/td&gt;
&lt;td&gt;Student Presentations&lt;/td&gt;
&lt;td&gt;Chan Ka Yi&lt;br /&gt;Kim Yoon Young&lt;br /&gt;Tanna Shilpa Shirishkumar&lt;br /&gt;Ruchi Jaggi&lt;br /&gt;Haesook Yong&lt;/td&gt;
&lt;td&gt;&lt;br /&gt;&lt;/td&gt;
&lt;/tr&gt;
&lt;tr&gt;
&lt;td&gt;Aug 08&lt;br /&gt;(Wed)&lt;br /&gt;&lt;/td&gt;
&lt;td&gt;10:00-1:00&lt;/td&gt;
&lt;td&gt;Session 5: Gender and Culture&lt;/td&gt;
&lt;td&gt;Instructors: &lt;br /&gt;Navaneetha Mokkil &lt;br /&gt;Nitya Vasudevan&lt;br /&gt;&lt;/td&gt;
&lt;td&gt;Why Culture Matters – Tejaswini Niranjana&lt;br /&gt;&lt;br /&gt;Prostitutes Parasites and the house of state feminism – Naifei Ding&lt;br /&gt;&lt;br /&gt;Women and Freedom – Firdous Azim&lt;br /&gt;&lt;br /&gt;Letters to the Editor: The domestic violence act and conflict&lt;br /&gt;&lt;br /&gt;Spectralization of the Rural: Reinterpreting the labour mobility of rural young women in post-Mao China – Yan Hairong&lt;/td&gt;
&lt;/tr&gt;
&lt;tr&gt;
&lt;td&gt;&lt;br /&gt;&lt;/td&gt;
&lt;td&gt;3:00-5:00&lt;/td&gt;
&lt;td&gt;Student Presentations&lt;/td&gt;
&lt;td&gt;Elmo I-Che Hsu&lt;br /&gt;Pang Ka Wei (Janet)&lt;br /&gt;Li-Fang Lai&lt;br /&gt;Kris Yu-Shiuan Chi&lt;br /&gt;Samia Vasa&lt;br /&gt;Shwetha D&lt;br /&gt;Ryu M-Rye&lt;br /&gt;Sabreena Ahmed&lt;/td&gt;
&lt;td&gt;&lt;br /&gt;&lt;/td&gt;
&lt;/tr&gt;
&lt;tr&gt;
&lt;td&gt;&lt;br /&gt;&lt;/td&gt;
&lt;td&gt;6:00-8:00&lt;/td&gt;
&lt;td&gt;EVENING SALON&lt;/td&gt;
&lt;td&gt;Firdous Azim and Naifei Ding/Siddharth/Arvind in conversation&lt;/td&gt;
&lt;td&gt;Venue: Alternative Law Forum&lt;/td&gt;
&lt;/tr&gt;
&lt;tr&gt;
&lt;td&gt;Aug 09&lt;br /&gt;(Thu)&lt;/td&gt;
&lt;td&gt;10:00-1:00&lt;/td&gt;
&lt;td&gt;FIELD TRIP&lt;/td&gt;
&lt;td&gt;SURESH JAYARAM – Pettai Tour&lt;/td&gt;
&lt;td&gt;&lt;br /&gt;&lt;/td&gt;
&lt;/tr&gt;
&lt;tr&gt;
&lt;td&gt;Aug 10&lt;br /&gt;(Fri)&lt;/td&gt;
&lt;td&gt;10:00-1:00&lt;/td&gt;
&lt;td&gt;Session 6: Understanding Popular Cultural Practice&lt;/td&gt;
&lt;td&gt;Instructors: &lt;br /&gt;Namita Malhotra&lt;br /&gt;Nishant Shah&lt;br /&gt;&lt;/td&gt;
&lt;td&gt;Hong Kong Action film in the Indian B Circuit – SV Srinivas&lt;br /&gt;&lt;br /&gt;Inter-Asia Comparative Framework: Postcolonial Film Historiography in Taiwan and South Korea – Kim Soyoung&lt;br /&gt;&lt;br /&gt;Fan Bhakti and Subaltern Sovereignty – Madhava Prasad&lt;/td&gt;
&lt;/tr&gt;
&lt;tr&gt;
&lt;td&gt;&lt;br /&gt;&lt;/td&gt;
&lt;td&gt;3:00-5:00&lt;/td&gt;
&lt;td&gt;Student Presentations&lt;/td&gt;
&lt;td&gt;Samhita Sunya&lt;br /&gt;Khatija Sana Khader&lt;br /&gt;Ayesha Maria Mualla&lt;br /&gt;Antoreep Sengupta&lt;/td&gt;
&lt;td&gt;&lt;br /&gt;&lt;/td&gt;
&lt;/tr&gt;
&lt;tr&gt;
&lt;td&gt;Aug 11&lt;br /&gt;(Sat)&lt;/td&gt;
&lt;td&gt;&lt;br /&gt;&lt;/td&gt;
&lt;td&gt;UNWORKSHOP DAY (Writing)&lt;/td&gt;
&lt;td&gt;Evening: Final Party&lt;/td&gt;
&lt;td&gt;&lt;br /&gt;&lt;/td&gt;
&lt;/tr&gt;
&lt;/tbody&gt;
&lt;/table&gt;
&lt;p&gt;&lt;b&gt; &lt;/b&gt;&lt;/p&gt;
&lt;p&gt;&lt;b&gt;Optional Courses:&lt;/b&gt; Two additional 4-day parallel courses shall be offered to participants interested in specialised inquiries of their research practice.&lt;/p&gt;
&lt;p&gt;&lt;b&gt;Course A. The Digital Subject: Science, Technology and Society in Asia&lt;/b&gt;&lt;/p&gt;
&lt;ul&gt;
&lt;li&gt;Course Coordinator: Nishant Shah&lt;/li&gt;
&lt;/ul&gt;
&lt;ul&gt;
&lt;li&gt;Course Instructors: Nishant Shah, Lawrence Liang and Ashish Rajadhyaksha&lt;/li&gt;
&lt;/ul&gt;
&lt;p&gt;&lt;b&gt; &lt;/b&gt;&lt;/p&gt;
&lt;p&gt;&lt;b&gt;Course B. Research Seminar on Technology, Culture &amp;amp; the Body&lt;/b&gt;&lt;/p&gt;
&lt;ul&gt;
&lt;li&gt;Course Coordinator: Nitya Vasudevan&lt;/li&gt;
&lt;/ul&gt;
&lt;ul&gt;
&lt;li&gt;Course Instructors: Tejaswini Niranjana, DING Naifei, Audrey Yue, Wing-Kwong Wong, Hsing-Wen Chang, Nitya Vasudevan&lt;/li&gt;
&lt;/ul&gt;
&lt;div id="_mcePaste"&gt;
&lt;p class="TableContents"&gt;&lt;span&gt;Nandy&lt;/span&gt;&lt;/p&gt;
&lt;p class="TableContents"&gt;&lt;span&gt; &lt;/span&gt;&lt;/p&gt;
&lt;span&gt;A National Culture for Pakistan: the political economy of a debate – Saadia Toor&lt;/span&gt;&lt;/div&gt;
&lt;p class="TableContents"&gt;&lt;span&gt;Nandy&lt;/span&gt;&lt;/p&gt;
&lt;p class="TableContents"&gt;&lt;span&gt; &lt;/span&gt;&lt;/p&gt;
&lt;p&gt;&lt;span&gt;A National Culture for Pakistan: the political economy of a debate – Saadia Toor&lt;/span&gt;&lt;/p&gt;
        &lt;p&gt;
        For more details visit &lt;a href='https://cis-india.org/internet-governance/iacs-summer-school-2012'&gt;https://cis-india.org/internet-governance/iacs-summer-school-2012&lt;/a&gt;
        &lt;/p&gt;
    </description>
    <dc:publisher>No publisher</dc:publisher>
    <dc:creator>praskrishna</dc:creator>
    <dc:rights></dc:rights>

    
        <dc:subject>Event Type</dc:subject>
    
    
        <dc:subject>Internet Governance</dc:subject>
    
    
        <dc:subject>Research</dc:subject>
    

   <dc:date>2012-08-02T13:23:14Z</dc:date>
   <dc:type>Event</dc:type>
   </item>




</rdf:RDF>
