<?xml version="1.0" encoding="utf-8" ?>
<rdf:RDF xmlns:rdf="http://www.w3.org/1999/02/22-rdf-syntax-ns#" xmlns:dc="http://purl.org/dc/elements/1.1/" xmlns:syn="http://purl.org/rss/1.0/modules/syndication/" xmlns="http://purl.org/rss/1.0/">
<channel rdf:about="https://cis-india.org/internet-governance/blog/online-anonymity/search_rss">
  <title>We are anonymous, we are legion</title>
  <link>https://cis-india.org</link>
  
  <description>These are the search results for the query, showing results 101 to 115.</description>
  <image rdf:resource="https://cis-india.org/logo.png"/>

  <items>
    <rdf:Seq>
      <rdf:li rdf:resource="https://cis-india.org/internet-governance/governing-id-india2019s-unique-identity-programme"/>
      <rdf:li rdf:resource="https://cis-india.org/internet-governance/blog/governing-id-2028use-of-digital-id-for-verification"/>
      <rdf:li rdf:resource="https://cis-india.org/internet-governance/blog/governing-id-a-framework-for-evaluation-of-digital-identity"/>
      <rdf:li rdf:resource="https://cis-india.org/internet-governance/blog/governing-id-introducing-our-evaluation-framework"/>
      <rdf:li rdf:resource="https://cis-india.org/internet-governance/blog/divergence-between-the-general-data-protection-regulation-and-the-personal-data-protection-bill-2019"/>
      <rdf:li rdf:resource="https://cis-india.org/internet-governance/blog/content-takedown-and-users-rights-1"/>
      <rdf:li rdf:resource="https://cis-india.org/internet-governance/blog/comments-to-the-personal-data-protection-bill-2019"/>
      <rdf:li rdf:resource="https://cis-india.org/internet-governance/events/how-to-shutdown-internet-shutdowns"/>
      <rdf:li rdf:resource="https://cis-india.org/internet-governance/automated-facial-recognition-systems-and-the-mosaic-theory-of-privacy-the-way-forward"/>
      <rdf:li rdf:resource="https://cis-india.org/internet-governance/automated-facial-recognition-systems-afrs-responding-to-related-privacy-concerns"/>
      <rdf:li rdf:resource="https://cis-india.org/internet-governance/decrypting-automated-facial-recognition-systems-afrs-and-delineating-related-privacy-concerns"/>
      <rdf:li rdf:resource="https://cis-india.org/internet-governance/extra-territorial-surveillance-and-the-incapacitation-of-human-rights"/>
      <rdf:li rdf:resource="https://cis-india.org/internet-governance/blog/article-19-akriti-bopanna-and-ephraim-percy-kenyanito-december-16-2019-icann-takes-one-step-forward-in-its-human-rights-and-accountability-commitments"/>
      <rdf:li rdf:resource="https://cis-india.org/internet-governance/call-for-comments-model-security-standards-for-the-indian-fintech-industry"/>
      <rdf:li rdf:resource="https://cis-india.org/internet-governance/news/ietf106"/>
    </rdf:Seq>
  </items>

</channel>


    <item rdf:about="https://cis-india.org/internet-governance/governing-id-india2019s-unique-identity-programme">
    <title>Governing ID: India’s Unique Identity Programme</title>
    <link>https://cis-india.org/internet-governance/governing-id-india2019s-unique-identity-programme</link>
    <description>
&lt;div class="content"&gt;
&lt;p&gt;In our second case-study, we use our Evaluation Framework for Digital ID to assess India’s Unique Identity Programme.&lt;/p&gt;
&lt;p&gt;Read the &lt;a class="external-link" href="https://digitalid.design/evaluation-framework-case-studies/india.html"&gt;case-study&lt;/a&gt; or download as &lt;a href="https://cis-india.org/internet-governance/digital-id-india-case-study" class="internal-link" title="Digital ID India Case Study"&gt;PDF&lt;/a&gt;.&lt;/p&gt;
&lt;/div&gt;

        &lt;p&gt;
        For more details visit &lt;a href='https://cis-india.org/internet-governance/governing-id-india2019s-unique-identity-programme'&gt;https://cis-india.org/internet-governance/governing-id-india2019s-unique-identity-programme&lt;/a&gt;
        &lt;/p&gt;
    </description>
    <dc:publisher>No publisher</dc:publisher>
    <dc:creator>Vrinda Bhandari</dc:creator>
    <dc:rights></dc:rights>

    
    <dc:subject>internet governance</dc:subject>
    <dc:subject>Internet Governance</dc:subject>
    <dc:subject>Digital ID</dc:subject>
    <dc:subject>Digital Identity</dc:subject>
    

   <dc:date>2020-03-02T11:38:51Z</dc:date>
   <dc:type>Blog Entry</dc:type>
   </item>


    <item rdf:about="https://cis-india.org/internet-governance/blog/governing-id-2028use-of-digital-id-for-verification">
    <title>Governing ID: Use of Digital ID for Verification</title>
    <link>https://cis-india.org/internet-governance/blog/governing-id-2028use-of-digital-id-for-verification</link>
    <description>
&lt;p&gt;This is the first in a series of case studies, using our recently-published &lt;a href="https://digitalid.design/evaluation-framework-02.html"&gt;Evaluation Framework for Digital ID&lt;/a&gt;. It looks at the use of digital identity programmes for the purpose of verification, often using the process of deduplication.&lt;/p&gt;
&lt;p&gt;&lt;img src="https://cis-india.org/internet-governance/image-governing-id-use-of-digital-id-for-verification/" alt="" width="100%" /&gt;&lt;/p&gt;
&lt;p&gt;Read the &lt;a class="external-link" href="https://digitalid.design/evaluation-framework-case-studies/verification.html"&gt;case-study&lt;/a&gt; or download as &lt;a href="https://cis-india.org/internet-governance/use-of-digital-id-for-verification" class="internal-link" title="Use of Digital ID for Verification"&gt;PDF&lt;/a&gt;.&lt;/p&gt;
&lt;p&gt;&amp;nbsp;&lt;/p&gt;

        &lt;p&gt;
        For more details visit &lt;a href='https://cis-india.org/internet-governance/blog/governing-id-2028use-of-digital-id-for-verification'&gt;https://cis-india.org/internet-governance/blog/governing-id-2028use-of-digital-id-for-verification&lt;/a&gt;
        &lt;/p&gt;
    </description>
    <dc:publisher>No publisher</dc:publisher>
    <dc:creator>Shruti Trikanad</dc:creator>
    <dc:rights></dc:rights>

    
    <dc:subject>internet governance</dc:subject>
    <dc:subject>Internet Governance</dc:subject>
    <dc:subject>Digital ID</dc:subject>
    <dc:subject>Digital Identity</dc:subject>
    

   <dc:date>2020-03-02T11:16:19Z</dc:date>
   <dc:type>Blog Entry</dc:type>
   </item>


    <item rdf:about="https://cis-india.org/internet-governance/blog/governing-id-a-framework-for-evaluation-of-digital-identity">
    <title>Governing ID: A Framework for Evaluation of Digital Identity</title>
    <link>https://cis-india.org/internet-governance/blog/governing-id-a-framework-for-evaluation-of-digital-identity</link>
    <description>
&lt;p&gt;As governments across the globe implement new and foundational digital identification systems (Digital ID), or modernize existing ID programs, there is an urgent need for more research and discussion about appropriate uses of Digital ID systems. This significant momentum for creating Digital ID has been accompanied by concerns about the privacy, surveillance and exclusion harms of state-issued Digital IDs in several parts of the world, resulting in campaigns and litigation in countries such as the UK, India, Kenya, and Jamaica. Given the sweeping range of considerations required to evaluate Digital ID projects, it is necessary to formulate evaluation frameworks that can be used for this purpose.&lt;/p&gt;
&lt;p&gt;This work began with the question of what the appropriate uses of Digital ID can be, but through the research process, it became clear that the question of use cannot be divorced from the fundamental attributes of Digital ID systems and their governance structures. This framework provides tests which can be used to evaluate the governance of Digital ID across jurisdictions, as well as to determine whether a particular use of Digital ID is legitimate. Through three kinds of checks — Rule of Law tests, Rights-based tests, and Risk-based tests — this scheme is a ready guide for the evaluation of Digital ID.&lt;/p&gt;
&lt;p&gt;&lt;img src="https://cis-india.org/internet-governance/image-governing-id-principles-for-evalution/" alt="" width="100%" /&gt;&lt;/p&gt;
&lt;p&gt;&amp;nbsp;&lt;/p&gt;
&lt;p&gt;View the &lt;a class="external-link" href="https://digitalid.design/evaluation-framework-02.html"&gt;framework&lt;/a&gt; or download as&amp;nbsp;&lt;a href="https://cis-india.org/internet-governance/governing-id-principles-for-evalution" class="internal-link" title="Governing ID: Principles for Evalution"&gt;PDF&lt;/a&gt;.&lt;/p&gt;

        &lt;p&gt;
        For more details visit &lt;a href='https://cis-india.org/internet-governance/blog/governing-id-a-framework-for-evaluation-of-digital-identity'&gt;https://cis-india.org/internet-governance/blog/governing-id-a-framework-for-evaluation-of-digital-identity&lt;/a&gt;
        &lt;/p&gt;
    </description>
    <dc:publisher>No publisher</dc:publisher>
    <dc:creator>Vrinda Bhandari, Shruti Trikanad, and Amber Sinha</dc:creator>
    <dc:rights></dc:rights>

    
    <dc:subject>internet governance</dc:subject>
    <dc:subject>Internet Governance</dc:subject>
    <dc:subject>Digital ID</dc:subject>
    <dc:subject>Digital Identity</dc:subject>
    

   <dc:date>2020-03-02T13:22:43Z</dc:date>
   <dc:type>Blog Entry</dc:type>
   </item>


    <item rdf:about="https://cis-india.org/internet-governance/blog/governing-id-introducing-our-evaluation-framework">
    <title>Governing ID: Introducing our Evaluation Framework</title>
    <link>https://cis-india.org/internet-governance/blog/governing-id-introducing-our-evaluation-framework</link>
    <description>
&lt;div class="content"&gt;
&lt;p&gt;With the rise of national digital identity systems (Digital ID) across the world, there is a growing need to examine their impact on human rights. In several instances, national Digital ID programmes started with a specific scope of use, but have since been deployed for different applications, and in different sectors. This raises the question of how to determine appropriate and inappropriate uses of Digital ID. In April 2019, our research began with this question, but it quickly became clear that a determination of the legitimacy of uses hinged on the fundamental attributes and governing structure of the Digital ID system itself. Our evaluation framework is intended as a series of questions against which Digital ID may be tested. We hope that these questions will inform the trade-offs that must be made while building and assessing identity programmes, to ensure that human rights are adequately protected.&lt;/p&gt;
&lt;h4&gt;Rule of Law Tests&lt;/h4&gt;
&lt;p&gt;Foundational Digital ID must only be implemented along with a legitimate regulatory framework that governs all aspects of Digital ID, including its aims and purposes, the actors who have access to it, etc. In the absence of this framework, there is nothing that precludes Digital IDs from being leveraged by public and private actors for purposes outside the intended scope of the programme. Our rule of law principles mandate that the governing law should be enacted by the legislature, be devoid of excessive delegation, be clear and accessible to the public, and be precise and limiting in its scope for discretion. These principles are substantiated by the criticism that the Kenyan Digital ID, the Huduma Namba, was met with when it was legalized through a Miscellaneous Amendment Act, meant only for small or negligible amendments and typically passed without any deliberation. This set of tests responds to the haste with which Digital ID has been implemented, often in the absence of an enabling law which adequately addresses its potential harms.&lt;/p&gt;
&lt;h4&gt;Rights based Tests&lt;/h4&gt;
&lt;p&gt;Digital ID, because of its collection of personal data and determination of the eligibility and rights of users, intrinsically involves restrictions on certain fundamental rights. The use of Digital ID for essential functions of the State, including delivery of benefits and welfare, and maintenance of civil and sectoral records, enhances the impact of these restrictions. Accordingly, the entire identity framework, including its architecture, uses, actors, and regulators, must be evaluated at every stage against the rights it is potentially violating. Only then will we be able to determine if such violation is necessary and proportionate to the benefits it offers. In Jamaica, the National Identification and Registration Act, which mandated citizens’ biometric enrolment at the risk of criminal sanctions, was held to be a disproportionate violation of privacy, and therefore unconstitutional.&lt;/p&gt;
&lt;h4&gt;Risk based Tests&lt;/h4&gt;
&lt;p&gt;Even with a valid rule of law framework that seeks to protect rights, the design and use of Digital ID must be based on an analysis of the risks that the system introduces. This could take the form of choosing between a centralized and a federated data-storage framework, based on the effects of potential failure or breach, or of restricting the uses of the Digital ID to limit the actors that will benefit from breaching it. Aside from the design of the system, the regulatory framework that governs it should also be tailored to the potential risks of its use. The primary rationale behind a risk assessment for an identity framework is that it should be tested not merely against universal metrics of legality and proportionality, but also against an examination of the risks and harms it poses. Implicit in a risk-based assessment is also the requirement of implementing a responsive mitigation strategy for the risks identified, both while creating and governing the identity programme.&lt;/p&gt;
&lt;p&gt;Digital ID programmes create an inherent power imbalance between the State and its residents because of the personal data they collect and the consequent determination of significant rights, potentially creating risks of surveillance, exclusion, and discrimination. The accountability and efficiency gains they promise must not lead to hasty or inadequate implementation.&lt;/p&gt;
&lt;/div&gt;

        &lt;p&gt;
        For more details visit &lt;a href='https://cis-india.org/internet-governance/blog/governing-id-introducing-our-evaluation-framework'&gt;https://cis-india.org/internet-governance/blog/governing-id-introducing-our-evaluation-framework&lt;/a&gt;
        &lt;/p&gt;
    </description>
    <dc:publisher>No publisher</dc:publisher>
    <dc:creator>Shruti Trikanad</dc:creator>
    <dc:rights></dc:rights>

    
    <dc:subject>internet governance</dc:subject>
    <dc:subject>Internet Governance</dc:subject>
    <dc:subject>Digital ID</dc:subject>
    <dc:subject>Digital Identity</dc:subject>
    

   <dc:date>2020-03-02T08:03:49Z</dc:date>
   <dc:type>Blog Entry</dc:type>
   </item>


    <item rdf:about="https://cis-india.org/internet-governance/blog/divergence-between-the-general-data-protection-regulation-and-the-personal-data-protection-bill-2019">
    <title>Divergence between the General Data Protection Regulation and the Personal Data Protection Bill, 2019</title>
    <link>https://cis-india.org/internet-governance/blog/divergence-between-the-general-data-protection-regulation-and-the-personal-data-protection-bill-2019</link>
    <description>
&lt;p&gt;Our note on the divergence between the General Data Protection Regulation and the Personal Data Protection Bill can be downloaded as a PDF &lt;a href="https://cis-india.org/internet-governance/divergence-between-the-gdpr-and-pdp-bill-2019" class="internal-link" title="Divergence between the GDPR and PDP Bill 2019"&gt;here&lt;/a&gt;.&lt;/p&gt;
&lt;p&gt;The European Union’s General Data Protection Regulation (GDPR), replacing the 1995 EU Data Protection Directive, came into effect in May 2018. It harmonises data protection regulations across the European Union. In India, the Ministry of Electronics and Information Technology had constituted a Committee of Experts (chaired by Justice Srikrishna) to frame recommendations for a data protection framework in India. The Committee submitted its report and a draft Personal Data Protection Bill in July 2018 (2018 Bill). Public comments were sought on the bill till October 2018. The Central Government revised the Bill and introduced the revised version of the Personal Data Protection Bill (PDP Bill) in the Lok Sabha on December 11, 2019.&lt;/p&gt;
&lt;p&gt;The PDP Bill has incorporated certain aspects of the GDPR, such as requirements for notice to be given to the data principal, consent for processing of data, establishment of a data protection authority, etc. However, there are some differences, and in this note we have highlighted the areas of divergence between the two. The note only covers provisions which are common to the GDPR and the PDP Bill. It does not include the provisions on (i) the Appellate Tribunal; (ii) Finance, Account and Audit; and (iii) Non-Personal Data.&lt;/p&gt;

        &lt;p&gt;
        For more details visit &lt;a href='https://cis-india.org/internet-governance/blog/divergence-between-the-general-data-protection-regulation-and-the-personal-data-protection-bill-2019'&gt;https://cis-india.org/internet-governance/blog/divergence-between-the-general-data-protection-regulation-and-the-personal-data-protection-bill-2019&lt;/a&gt;
        &lt;/p&gt;
    </description>
    <dc:publisher>No publisher</dc:publisher>
    <dc:creator>Pallavi Bedi</dc:creator>
    <dc:rights></dc:rights>

    
    <dc:subject>Internet Governance</dc:subject>
    <dc:subject>Data Protection</dc:subject>
    <dc:subject>Privacy</dc:subject>
    

   <dc:date>2020-02-21T11:08:50Z</dc:date>
   <dc:type>Blog Entry</dc:type>
   </item>


    <item rdf:about="https://cis-india.org/internet-governance/blog/content-takedown-and-users-rights-1">
    <title>Content takedown and users' rights</title>
    <link>https://cis-india.org/internet-governance/blog/content-takedown-and-users-rights-1</link>
    <description>
        &lt;b&gt;After Shreya Singhal v Union of India, commentators have continued to question the constitutionality of the content takedown regime under Section 69A of the IT Act (and the Blocking Rules issued under it). There has also been considerable debate around how the judgement has changed this regime: specifically about (i) whether originators of content are entitled to a hearing, (ii) whether Rule 16 of the Blocking Rules, which mandates confidentiality of content takedown requests received by intermediaries from the Government, continues to be operative, and (iii) the effect of Rule 16 on the rights of the originator and the public to challenge executive action. In this opinion piece, we attempt to answer some of these questions.&lt;/b&gt;
        
&lt;p style="text-align: justify;" class="normal"&gt;&amp;nbsp;&lt;/p&gt;
&lt;p style="text-align: justify;" class="normal"&gt;This article was first &lt;a class="external-link" href="https://theleaflet.in/content-takedown-and-users-rights/"&gt;published&lt;/a&gt; at the Leaflet. It has subsequently been republished by &lt;a class="external-link" href="https://scroll.in/article/953146/how-india-is-using-its-information-technology-act-to-arbitrarily-take-down-online-content"&gt;Scroll.in&lt;/a&gt;, &lt;a class="external-link" href="https://kashmirobserver.net/2020/02/15/content-takedown-and-users-rights/"&gt;Kashmir Observer&lt;/a&gt; and the &lt;a class="external-link" href="https://cyberbrics.info/content-takedown-and-users-rights/"&gt;CyberBRICS blog&lt;/a&gt;.&lt;/p&gt;
&lt;p style="text-align: justify;" class="normal"&gt;&lt;strong&gt;Introduction&lt;/strong&gt;&lt;/p&gt;
&lt;p style="text-align: justify;" class="normal"&gt;Last year, several Jio users from different states&amp;nbsp;&lt;a href="https://www.medianama.com/2019/03/223-indiankanoon-jio-block/"&gt;reported&lt;/a&gt;&amp;nbsp;that sites like Indian Kanoon, Reddit and Telegram were inaccessible through their connections. While attempting to access these websites, users were presented with a notice that the websites were blocked on orders from the Department of Telecommunications (DoT). When contacted by the founder of Indian Kanoon, Reliance Jio&amp;nbsp;&lt;a href="https://in.reuters.com/article/us-india-internet-idINKCN1RF14D"&gt;stated&lt;/a&gt;&amp;nbsp;that the website had been blocked on orders of the government, and that the order had been rescinded the same evening. However, in response to a Right to Information (RTI) request, the DoT&amp;nbsp;&lt;a href="https://twitter.com/indiankanoon/status/1218193372210323456"&gt;said&lt;/a&gt;&amp;nbsp;they had no information about orders relating to the blocking of Indian Kanoon.&lt;/p&gt;
&lt;p style="text-align: justify;" class="normal"&gt;Alternatively, consider that the Committee to Protect Journalists (CPJ)&amp;nbsp;&lt;a href="https://cpj.org/blog/2019/10/india-opaque-legal-process-suppress-kashmir-twitter.php"&gt;expressed concern&lt;/a&gt;&amp;nbsp;last year that the Indian government was forcing Twitter to suspend accounts or remove content relating to Kashmir. They reported that over the last two years, the Indian government suppressed a substantial amount of information coming from the area, and prevented Indians from accessing more than five thousand tweets.&lt;/p&gt;
&lt;p style="text-align: justify;" class="normal"&gt;These instances are&amp;nbsp;&lt;a href="https://www.hindustantimes.com/analysis/to-preserve-freedoms-online-amend-the-it-act/story-aC0jXUId4gpydJyuoBcJdI.html"&gt;symptomatic&lt;/a&gt;&amp;nbsp;of a larger problem of opaque and arbitrary content takedown in India, enabled by the legal framework under the Information Technology (IT) Act. The Government derives its powers to order intermediaries (entities storing or transmitting information on behalf of others, a definition which includes internet service providers and social media platforms alike) to block online resources through&amp;nbsp;&lt;a href="https://indiankanoon.org/doc/10190353/"&gt;section 69A&lt;/a&gt;&amp;nbsp;of the IT Act and the&amp;nbsp;&lt;a href="https://meity.gov.in/writereaddata/files/Information%20Technology%20%28%20Procedure%20and%20safeguards%20for%20blocking%20for%20access%20of%20information%20by%20public%29%20Rules%2C%202009.pdf"&gt;rules&lt;/a&gt;&amp;nbsp;[“the blocking rules”] notified thereunder. Apart from this,&amp;nbsp;&lt;a href="https://indiankanoon.org/doc/844026/"&gt;section 79&lt;/a&gt;&amp;nbsp;of the IT Act and its allied rules also prescribe a procedure for content removal.&amp;nbsp;&lt;a href="https://cis-india.org/internet-governance/files/a-deep-dive-into-content-takedown-frames"&gt;Conversations&lt;/a&gt;&amp;nbsp;with one popular intermediary revealed that the government usually prefers to use its powers under section 69A, possibly because of the opaque nature of the procedure that we highlight below.&lt;/p&gt;
&lt;p style="text-align: justify;" class="normal"&gt;Under section 69A, a content removal request can be sent by authorised personnel in the Central Government not below the rank of a Joint Secretary. The grounds for issuance of blocking orders under section 69A are: “&lt;em&gt;the interest of the sovereignty and integrity of India, defence of India, the security of the state, friendly relations with foreign states or public order or for preventing incitement to the commission of any cognisable offence relating to the above.&lt;/em&gt;” Specifically, the blocking rules envisage the process of blocking to be largely executive-driven, and require strict confidentiality to be maintained around the issuance of blocking orders. This shrouds content takedown orders in a cloak of secrecy, and makes it impossible for users and content creators to ascertain the legitimacy or legality of the government action in any instance of blocking.&lt;/p&gt;
&lt;p style="text-align: justify;" class="normal"&gt;&lt;strong&gt;Issues&lt;/strong&gt;&lt;/p&gt;
&lt;p style="text-align: justify;" class="normal"&gt;The Supreme Court had been called to determine the constitutional validity of section 69A and the allied rules in&amp;nbsp;&lt;a href="https://indiankanoon.org/doc/110813550/"&gt;&lt;em&gt;Shreya Singhal v Union of India&lt;/em&gt;&lt;/a&gt;. The petitioners had contended that as per the procedure laid down by these rules, there was no guarantee of pre-decisional hearing afforded to the originator of the information. Additionally, the petitioners pointed out that the safeguards built into section 95 and 96 of the Code of Criminal Procedure (CrPC), which allow state governments to ban publications and persons to initiate legal challenges to those actions respectively, were absent from the blocking procedures. Lastly, the petitioners assailed rule 16 of the blocking rules, which mandated confidentiality of blocking procedures, on the grounds that it was affecting their fundamental rights.&lt;/p&gt;
&lt;p style="text-align: justify;" class="normal"&gt;The Court, however, found little merit in these arguments. Specifically, the Court found that section 69A was narrowly drawn and had sufficient procedural safeguards, which included the grounds for issuance of a blocking order being specifically drawn, and the mandate that the reasons for the website blocking be recorded in writing, thus making it amenable to judicial review. Further, the Court also found that the provision for setting up a review committee saved the law from constitutional infirmity. In the Court’s opinion, the mere absence of additional safeguards, such as the ones built into the CrPC, did not mean that the law was unconstitutional.&lt;/p&gt;
&lt;p style="text-align: justify;" class="normal"&gt;But do the ground realities align with the Court’s envisaged implementation of these principles? Apar Gupta, a counsel for the petitioners,&amp;nbsp;&lt;a href="https://indianexpress.com/article/opinion/columns/but-what-about-section-69a/"&gt;pointed&lt;/a&gt;&amp;nbsp;out that there was no recorded instance of a pre-decisional hearing being granted to show that this safeguard contained in the rules was actually being implemented. However, Gautam Bhatia&amp;nbsp;&lt;a href="https://indconlawphil.wordpress.com/2015/03/25/the-supreme-courts-it-act-judgment-and-secret-blocking/"&gt;read&lt;/a&gt;&amp;nbsp;&lt;em&gt;Shreya Singhal&amp;nbsp;&lt;/em&gt;to make an important advance: that the right of hearing be mandatorily extended to the ‘originator’, i.e. the content creator.&lt;/p&gt;
&lt;p style="text-align: justify;" class="normal"&gt;Additionally, Bhatia also noted that the Court, while upholding the constitutionality of the procedure under section 69A, held that the “&lt;em&gt;reasons have to be recorded in writing in such blocking order so that they may be assailed in a writ petition under Article 226 of the Constitution.&lt;/em&gt;”&lt;/p&gt;
&lt;p style="text-align: justify;" class="normal"&gt;There are two important takeaways from this.&amp;nbsp;&lt;em&gt;Firstly&lt;/em&gt;, he argued that the broad contours of the judgment invoke an established constitutional doctrine — that the fundamental right under Article 19(1)(a) does not merely include the right of expression, but also the&amp;nbsp;&lt;em&gt;right of access to information.&amp;nbsp;&lt;/em&gt;Accordingly, the right of challenging a blocking order was not only vested in the originator or the concerned intermediary, but may rest with the general public as well. And&amp;nbsp;&lt;em&gt;secondly&lt;/em&gt;, by the doctrine of necessary implication, it followed that for the general public to challenge any blocking order under Article 226, the blocking orders must be made public. While Bhatia concedes that public availability of blocking orders may be an over-optimistic reading of the judgment, recent events suggest that even the commonly-expected result, i.e. that content creators have a right to a hearing, has not been implemented by the Government.&lt;/p&gt;
&lt;p style="text-align: justify;" class="normal"&gt;Consider the&amp;nbsp;&lt;a href="https://internetfreedom.in/delhi-hc-issues-notice-to-the-government-for-blocking-satirical-dowry-calculator-website/"&gt;blocking&lt;/a&gt;&amp;nbsp;of the satirical website DowryCalculator.com in September 2019 on orders from the government. The website displayed a calculator that suggests a ‘dowry’ depending on the salary and education of a prospective groom: even if someone misses the satire, the contents of the website are not immediately relatable to any grounds of removal listed under section 69A of the IT Act.&lt;/p&gt;
&lt;p style="text-align: justify;" class="normal"&gt;Tanul Thakur, the creator of the website, was not granted a hearing despite the fact that he had publicly claimed ownership of the website at various times and that the website had been covered widely by the press. The information associated with the domain name also publicly lists Thakur’s name and contact information. Clearly, the government made no effort to contact Thakur when passing the order. Perhaps even more worryingly, when he&amp;nbsp;&lt;a href="https://internetfreedom.in/delhi-hc-issues-notice-to-the-government-for-blocking-satirical-dowry-calculator-website/"&gt;tried&lt;/a&gt;&amp;nbsp;to access a copy of the blocking order by filing an RTI request, MeitY cited the confidentiality rule to deny him the information.&lt;/p&gt;
&lt;p style="text-align: justify;" class="normal"&gt;This incident documents a fundamental problem plaguing the rules: the confidentiality clause is still being used to deny disclosure of key information on content takedown orders. The government has also used the provision to deny citizens a list of blocked websites, as responses to RTI requests have proven&amp;nbsp;&lt;a href="https://cis-india.org/internet-governance/blog/rti-application-to-bsnl-for-the-list-of-websites-blocked-in-india"&gt;time&lt;/a&gt;&amp;nbsp;and&amp;nbsp;&lt;a href="https://sflc.in/deity-provides-list-sites-blocked-2013-withholds-orders"&gt;again&lt;/a&gt;.&lt;/p&gt;
&lt;p style="text-align: justify;" class="normal"&gt;Clearly, the Supreme Court’s rationale in considering Section 69A and the blocking rules as constitutional is not one that is implemented in reality. The confidentiality clause is preventing legal challenges to content blocking in totality: content creators are unable to access the orders, and hence are unable to understand the executive’s reasoning in ordering their content to be blocked from public access.&lt;/p&gt;
&lt;p style="text-align: justify;" class="normal"&gt;As we noted earlier, the grounds for issuing a blocking order under section 69A pertain to certain reasonable restrictions on expression permitted by Article 19(2), which are couched in broad terms. The government’s implementation of section 69A and the rules makes it impossible to have any judicial review of, or accountability for, the conformity of blocking orders with the mentioned grounds under the rules, or any reasonable restriction at all.&lt;/p&gt;
&lt;p style="text-align: justify;" class="normal"&gt;&lt;strong&gt;The Way Forward&lt;/strong&gt;&lt;/p&gt;
&lt;p style="text-align: justify;" class="normal"&gt;From the opacity of proceedings under the law to the lack of information about them in the public domain, the Indian content takedown regime leaves a lot to be desired from both the government and the intermediaries at play.&lt;/p&gt;
&lt;p style="text-align: justify;" class="normal"&gt;First, we believe the Supreme Court’s decision in&amp;nbsp;&lt;em&gt;Shreya Singhal v. Union of India&lt;/em&gt;&amp;nbsp;casts an obligation on the government to attempt to contact the content creator if they are passing a content takedown order to an intermediary.&amp;nbsp;&lt;em&gt;Second&lt;/em&gt;, even if the content creator is unavailable for a hearing at that instance, the confidentiality clause should not be used to prevent future disclosure of information to the content creator, so that affected citizens can access and challenge these orders.&lt;/p&gt;
&lt;p style="text-align: justify;" class="normal"&gt;While we wait for legal reform, intermediaries can also step up to ensure the rights of users online are upheld. On receiving formal orders, intermediaries should&amp;nbsp;&lt;a href="https://cis-india.org/internet-governance/blog/torsha-sarkar-suhan-s-and-gurshabad-grover-october-30-2019-through-the-looking-glass"&gt;assess&lt;/a&gt;&amp;nbsp;the legality of the received request. This should involve ensuring that only authorised agencies and personnel have sent the content removal orders, that the order specifically mentions what provision the government is exercising the power under, and that the content removal requests relate to the grounds of removal that are permissible under section 69A. For instance, intermediaries should refuse to entertain content removal requests under section 69A of the IT Act if they relate to obscenity, a ground not covered by the provision.&lt;/p&gt;
&lt;p style="text-align: justify;" class="normal"&gt;The representatives of the intermediary should also push for the committee to grant a hearing to the content creator. Here, the intermediary can act as a liaison between the uploader and the governmental authorities.&lt;/p&gt;
&lt;p style="text-align: justify;" class="normal"&gt;The Supreme Court’s recent decision in&amp;nbsp;&lt;a href="https://indiankanoon.org/doc/82461587/"&gt;&lt;em&gt;Anuradha Bhasin v. Union of India&lt;/em&gt;&lt;/a&gt;&lt;em&gt;&amp;nbsp;&lt;/em&gt;offers a glimmer of hope for user rights online&lt;em&gt;.&amp;nbsp;&lt;/em&gt;While the case primarily challenged the orders imposing section 144 of the CrPC and a communication blockade in Jammu and Kashmir, the final decision does affirm the fundamental principle that government-imposed restrictions on the freedom of expression and assembly must be made available to the public and affected parties to enable challenges in a court of law.&lt;/p&gt;
&lt;p style="text-align: justify;" class="normal"&gt;The judiciary has yet another opportunity to consider the provision and the rules: late last year, Tanul Thakur&amp;nbsp;&lt;a href="https://internetfreedom.in/delhi-hc-issues-notice-to-the-government-for-blocking-satirical-dowry-calculator-website/"&gt;approached&lt;/a&gt;&amp;nbsp;the Delhi High Court to challenge the orders passed by the government to ISPs to block his website. One hopes that the future holds robust reforms to the content takedown regime.&lt;/p&gt;
&lt;p style="text-align: justify;" class="normal"&gt;We live in an era where the ebb and flow of societal discourse is increasingly channeled through intermediaries on the internet. In the absence of a mature, balanced and robust framework that enshrines the rule of law, we risk arbitrary modulation of the marketplace of ideas by the executive.&lt;/p&gt;
&lt;p style="text-align: justify;" class="normal"&gt;&lt;em&gt;Torsha Sarkar and Gurshabad Grover are researchers at the Centre for Internet and Society.&lt;/em&gt;&lt;/p&gt;
&lt;p style="text-align: justify;" class="normal"&gt;&lt;em&gt;Disclosure: The Centre for Internet and Society is a recipient of research grants from Facebook and Google.&lt;/em&gt;&lt;/p&gt;

        &lt;p&gt;
        For more details visit &lt;a href='https://cis-india.org/internet-governance/blog/content-takedown-and-users-rights-1'&gt;https://cis-india.org/internet-governance/blog/content-takedown-and-users-rights-1&lt;/a&gt;
        &lt;/p&gt;
    </description>
    <dc:publisher>No publisher</dc:publisher>
    <dc:creator>Torsha Sarkar, Gurshabad Grover</dc:creator>
    <dc:rights></dc:rights>

    
    <dc:subject>Internet Freedom</dc:subject>
    <dc:subject>Internet Governance</dc:subject>
    <dc:subject>Intermediary Liability</dc:subject>
    <dc:subject>Censorship</dc:subject>
    

   <dc:date>2020-02-17T05:18:25Z</dc:date>
   <dc:type>Blog Entry</dc:type>
   </item>


    <item rdf:about="https://cis-india.org/internet-governance/blog/comments-to-the-personal-data-protection-bill-2019">
    <title>Comments to the Personal Data Protection Bill 2019</title>
    <link>https://cis-india.org/internet-governance/blog/comments-to-the-personal-data-protection-bill-2019</link>
    <description>
        &lt;b&gt;The Personal Data Protection Bill, 2019 was introduced in the Lok Sabha on December 11, 2019. &lt;/b&gt;
        
&lt;p&gt;&amp;nbsp;&lt;/p&gt;
&lt;h4&gt;Please view our general comments below, or download as PDF &lt;a href="https://cis-india.org/accessibility/blog/cis-general-comments-to-the-pdp-bill-2019" class="internal-link" title="CIS' General Comments to the PDP Bill 2019"&gt;here&lt;/a&gt;.&lt;/h4&gt;
&lt;h4&gt;Our comments and recommendations can be downloaded as PDF &lt;a href="https://cis-india.org/accessibility/blog/cis-comments-pdp-bill-2019" class="internal-link" title="CIS Comments PDP Bill 2019"&gt;here&lt;/a&gt;.&lt;/h4&gt;
&lt;h4&gt;We have also prepared an annotated version of the Bill, where our detailed comments and recommendations can be viewed alongside the Bill, available as PDF &lt;a href="https://cis-india.org/accessibility/blog/annotated-ver-pdp-bill-2019" class="internal-link" title="Annotated ver PDP Bill 2019"&gt;here&lt;/a&gt;.&lt;/h4&gt;
&lt;hr /&gt;
&lt;h2&gt;General Comments&lt;/h2&gt;
&lt;h3&gt;1. Executive notification cannot abrogate fundamental rights&lt;/h3&gt;
&lt;p&gt;In 2017, the Supreme Court in K.S. Puttaswamy v Union of India [1] held the right to privacy to be a fundamental right. While this right is subject to reasonable restrictions, the restrictions have to meet a three-fold requirement, namely (i) existence of a law; (ii) legitimate state aim; and (iii) proportionality.&lt;/p&gt;
&lt;p&gt;Under the 2018 Bill, the exemption allowing government agencies to process personal data outside the provisions of the Bill in the ‘interest of the security of the State’ [2] was subject to a law being passed by Parliament. However, under Clause 35 of the present Bill, the Central Government is merely required to pass a written order exempting the government agency from the provisions of the Bill.&lt;/p&gt;
&lt;p&gt;Any restriction on the right to privacy will have to comply with the conditions prescribed in Puttaswamy I. An executive order issued by the central government authorising any agency of the government to process personal data does not satisfy the first requirement laid down by the Supreme Court in Puttaswamy I — as it is not a law passed by Parliament. The Supreme Court, while deciding upon the validity of Aadhaar in K.S. Puttaswamy v Union of India [3], noted that “an executive notification does not satisfy the requirement of a valid law contemplated under Puttaswamy. A valid law in this case would mean a law passed by Parliament, which is just, fair and reasonable. Any encroachment upon the fundamental right cannot be sustained by an executive notification.”&lt;/p&gt;
&lt;p&gt;&amp;nbsp;&lt;/p&gt;
&lt;h3&gt;2. Exemptions under Clause 35 do not comply with the legitimacy and proportionality test&lt;/h3&gt;
&lt;p&gt;The lead judgement in Puttaswamy I, while formulating the three-fold test, held that restraints on privacy emanate from the procedural and content-based mandate of Article 21 [4]. The Supreme Court in Maneka Gandhi v Union of India [5] had clearly established that “mere prescription of some kind of procedure cannot ever meet the mandate of Article 21. The procedure prescribed by law has to be fair, just and reasonable, not fanciful, oppressive and arbitrary” [6]. The existence of a law is the first requirement; the second requirement is that of ‘legitimate state aim’. As per the lead judgement, this requirement ensures that “the nature and content of the law which imposes the restriction falls within the zone of reasonableness mandated by Article 14, which is a guarantee against arbitrary state action” [7]. It is established that for a provision which confers upon the executive or administrative authority discretionary powers to be regarded as non-arbitrary, the provision should lay down clear and specific guidelines for the executive to exercise the power [8]. The third test to be complied with is that the restriction should be ‘proportionate,’ i.e. the means that are adopted by the legislature are proportional to the object and needs sought to be fulfilled by the law. The Supreme Court in Modern Dental College &amp;amp; Research Centre v State of Madhya Pradesh [9] specified the components of proportionality standards —&lt;/p&gt;
&lt;ol&gt;&lt;li&gt;A measure restricting a right must have a legitimate goal;&lt;/li&gt;
&lt;li&gt;It must be a suitable means of furthering this goal;&lt;/li&gt;
&lt;li&gt;There must not be any less restrictive, but equally effective alternative; and&lt;/li&gt;
&lt;li&gt;The measure must not have any disproportionate impact on the right holder.&lt;/li&gt;&lt;/ol&gt;
&lt;p&gt;Clause 35 provides extensive grounds for the Central Government to exempt any agency from the requirements of the Bill but does not specify the procedure to be followed by the agency while processing personal data under this provision. It merely states that the ‘procedure, safeguards and oversight mechanism to be followed’ will be prescribed in the rules. The wide powers conferred on the Central Government without clearly specifying the procedure may be contrary to the three-fold test laid down in Puttaswamy I, as it is difficult to ascertain whether a legitimate or proportionate objective is being fulfilled [10].&lt;/p&gt;
&lt;p&gt;&amp;nbsp;&lt;/p&gt;
&lt;h3&gt;3. Limited powers of Data Protection Authority in comparison with the Central Government&lt;/h3&gt;
&lt;p&gt;In comparison with the last version of the Personal Data Protection Bill, 2018, prepared by the Committee of Experts led by Justice Srikrishna, we witness an abrogation of the powers of the Data Protection Authority (Authority) to be created under this Bill. The powers and functions that were originally intended to be performed by the Authority have now been allocated to the Central Government. For example:&lt;/p&gt;
&lt;ol&gt;&lt;li&gt;In the 2018 Bill, the Authority had the power to notify further categories of sensitive personal data. Under the present Bill, the Central Government, in consultation with the sectoral regulators, has been conferred the power to do so.&lt;/li&gt;
&lt;li&gt;Under the 2018 Bill, the Authority had the sole power to determine and notify significant data fiduciaries; under the present Bill, however, the Central Government, in consultation with the Authority, has been given the power to notify social media intermediaries as significant data fiduciaries.&lt;/li&gt;&lt;/ol&gt;
&lt;p&gt;In order to govern data protection effectively, there is a need for a responsive market regulator with a strong mandate and resources. The political nature of personal data also requires that the governance of data, particularly the rule-making and adjudicatory functions performed by the Authority, be independent of the Executive.&lt;/p&gt;
&lt;p&gt;&amp;nbsp;&lt;/p&gt;
&lt;h3&gt;4. No clarity on data sandbox&lt;/h3&gt;
&lt;p&gt;The Bill contemplates a sandbox for “innovation in artificial intelligence, machine-learning or any other emerging technology in public interest.” A Data Sandbox is a non-operational environment where the analyst can model and manipulate data inside the data management system. Data sandboxes have been envisioned as a secure area where only a copy of the company’s or participant companies’ data is located [11]. In essence, it refers to a scalable platform which can be used to explore an enterprise’s information sets.&lt;/p&gt;
&lt;p&gt;On the other hand, regulatory sandboxes are controlled environments where firms can introduce innovations to a limited customer base within a relaxed regulatory framework, after which they may be allowed entry into the larger market after meeting certain conditions. This purportedly encourages innovation through the lowering of entry barriers, protecting newer entrants from unnecessary and burdensome regulation. Regulatory sandboxes can be interpreted as a form of responsive regulation by governments that seek to encourage innovation – they allow selected companies to experiment with solutions within an environment that is relatively free of most of the cumbersome regulations that they would ordinarily be subject to, while still subject to some appropriate safeguards and regulatory requirements. Sandboxes are regulatory tools which may be used to permit companies to innovate in the absence of heavy regulatory burdens. However, these burdens ordinarily refer to high barriers to entry (such as capital requirements for financial and banking companies), or regulatory costs. In this Bill, however, the relaxing of data protection provisions for data fiduciaries would lead to restrictions on the privacy of individuals. Limiting a fundamental right on grounds of ‘fostering innovation’ is not a constitutionally tenable position, and contradicts the primary objectives of a data protection law.&lt;/p&gt;
&lt;p&gt;&amp;nbsp;&lt;/p&gt;
&lt;h3&gt;5. The primacy of ‘harm’ in the Bill ought to be reconsidered&lt;/h3&gt;
&lt;p&gt;While a harms-based approach is necessary for data protection frameworks, such approaches should be restricted to the positive obligations, penal provisions and responsive regulation of the Authority. The Bill does not provide any guidance on either the interpretation of the term ‘harm’ [12] or the various activities covered within the definition of the term. Terms such as ‘loss of reputation or humiliation’ and ‘any discriminatory treatment’ set a subjective standard and are open to varied interpretations. This ambiguity in the definition will make it difficult for the data principal to demonstrate harm and for the Authority to take necessary action, as several provisions are based upon harm being caused or likely to be caused. Some of the significant provisions where ‘harm’ is a precondition for the provision to come into effect are —&lt;/p&gt;
&lt;ol&gt;&lt;li&gt;Clause 25: The data fiduciary is required to notify the Authority about a breach of personal data processed by it, if such breach is likely to cause harm to any data principal. The Authority, after taking into account the severity of the harm that may be caused to the data principal, will determine whether the data principal should be notified about the breach.&lt;/li&gt;
&lt;li&gt;Clause 32 (2): A data principal can file a complaint with the data fiduciary for a contravention of any of the provisions of the Act which has caused or is likely to cause ‘harm’ to the data principal.&lt;/li&gt;
&lt;li&gt;Clause 64 (1): A data principal who has suffered harm as a result of any violation of the provisions of the Act by a data fiduciary has the right to seek compensation from the data fiduciary.&lt;/li&gt;
&lt;li&gt;Clause 16 (5): The guardian data fiduciary is barred from profiling, tracking or undertaking targeted advertising directed at children, and from undertaking any other processing of personal data that can cause significant harm to the child.&lt;/li&gt;&lt;/ol&gt;
&lt;p&gt;&amp;nbsp;&lt;/p&gt;
&lt;h3&gt;6. Non personal data should be outside the scope of this Bill&lt;/h3&gt;
&lt;p&gt;Clause 91 (1) states that the Act does not prevent the Central Government from framing a policy for the digital economy, in so far as such policy does not govern personal data. The Central Government can, in consultation with the Authority, direct any data fiduciary to provide any anonymised personal data or other non-personal data to enable better targeting of delivery of services or formulation of evidence-based policies, in any manner as may be prescribed.&lt;/p&gt;
&lt;p&gt;It is concerning that the data protection bill has specifically carved out an exception for the Central Government to frame policies for the digital economy, and seems to indicate that the government plans to freely use any and all anonymised and/or non-personal data that rests with any data fiduciary that falls under the ambit of the bill to support the digital economy, including for its growth, security, integrity, and prevention of misuse. It is unclear how the government, in practice, will be able to compel organizations to share this data. Further, there is a lack of clarity on the contours of the definition of non-personal data, and the Bill does not define the term. It is also unclear whether the Central Government can compel the data fiduciary to transfer/share all forms of non-personal data, and what the rights and obligations of data fiduciaries and data principals over such forms of data are. Anonymised data refers to data which has ‘irreversibly’ been converted into a form in which the data principal cannot be identified. However, as several instances have shown, ‘irreversible’ anonymisation is not possible: in the United States, the home addresses of taxi drivers were uncovered, and in Australia individual health records were mined from anonymised medical bills [13]. In September 2019, the Ministry of Electronics and Information Technology constituted an expert committee under the chairmanship of Kris Gopalakrishnan to study various issues relating to non-personal data and to deliberate over a data governance framework for the regulation of such data.&lt;/p&gt;
&lt;p&gt;The provision should be deleted, and the scope of the bill should be limited to the protection of personal data and to providing a framework for the protection of individual privacy. Until the report of the expert committee is published, the Central Government should not frame any law/regulation on the access and monetisation of non-personal/anonymised data, nor create a blanket provision allowing it to request such data from any data fiduciary that falls within the ambit of the bill. If the government wishes to use data resting with a data fiduciary, it must do so on a case-to-case basis and under formal and legal agreements with each data fiduciary.&lt;/p&gt;
&lt;p&gt;&amp;nbsp;&lt;/p&gt;
&lt;h3&gt;7. Steps towards greater decentralisation of power&lt;/h3&gt;
&lt;p&gt;We propose the following steps towards greater decentralisation of powers and devolved jurisdiction —&lt;/p&gt;
&lt;ol&gt;&lt;li&gt;Creation of State Data Protection Authorities: A single centralised body may not be the appropriate form of such a regulator. We propose that, on the lines of the central and state commissions under the Right to Information Act, 2005, state data protection authorities be set up which are in a position to respond to local complaints and exercise jurisdiction over entities within their territorial jurisdictions.&lt;/li&gt;
&lt;li&gt;More involvement of industry bodies and civil society actors: In order to lessen the burden on the data protection authorities, it is necessary that there is active engagement with industry bodies, sectoral regulators and civil society bodies engaged in privacy research. Currently, the Bill provides for the involvement of industry or trade associations, associations representing the interests of data principals, sectoral regulators or statutory authorities, or departments or ministries of the Central or State Government in the formulation of codes of practice. However, it would be useful to also have more active participation of industry associations and civil society bodies in activities such as promoting awareness among data fiduciaries of their obligations under this Act, and promoting measures and undertaking research for innovation in the field of protection of personal data.&lt;/li&gt;&lt;/ol&gt;
&lt;p&gt;&amp;nbsp;&lt;/p&gt;
&lt;h3&gt;8. The Authority must be empowered to exercise responsive regulation&lt;/h3&gt;
&lt;p&gt;In a country like India, the challenge is to move rapidly from a state of little or no data protection law, and consequently an abysmal state of data privacy practices, to a strong data protection regulation and a powerful regulator capable of enabling a state of robust data privacy practices. This requires a system of supportive mechanisms for the stakeholders in the data ecosystem, as well as systemic measures which enable the proactive detection of breaches. Further, keeping in mind the limited regulatory capacity in India, there is a need for the Authority to make use of different kinds of inexpensive and innovative strategies. We recommend the following additional powers for the Authority to be clearly spelt out in the Bill —&lt;/p&gt;
&lt;ol&gt;&lt;li&gt;Informal Guidance: It would be useful for the Authority to set up a mechanism on the lines of the Securities and Exchange Board of India (SEBI)’s Informal Guidance Scheme, which enables regulated entities to approach the Authority for non-binding advice on the position of law. Given that this is the first omnibus data protection law in India, and there is very little jurisprudence on the subject from India, it would be extremely useful for regulated entities to get guidance from the regulator.&lt;/li&gt;
&lt;li&gt;Power to name and shame: When a DPA makes public the names of organisations that have seriously contravened data protection legislation, this is a practice known as “naming and shaming.” The UK ICO and other DPAs recognise the power of publicity, as evidenced by their willingness to co-operate with the media. The ICO does not simply post monetary penalty notices (MPNs or fines) on its website for journalists to find, but frequently issues press releases, briefs journalists and uses social media. The ICO’s publicity statement on communicating enforcement activities states that the “ICO aims to get media coverage for enforcement activities.”&lt;/li&gt;
&lt;li&gt;Undertakings: The UK ICO has also leveraged the threat of fines into an alternative enforcement mechanism, seeking contractual undertakings from data controllers to take certain remedial steps. Undertakings have significant advantages for the regulator. Since an undertaking is a more “co-operative” solution, it is less likely that a data controller will challenge it. An undertaking is simpler and easier to put in place. Furthermore, the Authority can put an undertaking in place quickly, as opposed to legal proceedings, which take longer.&lt;/li&gt;&lt;/ol&gt;
&lt;p&gt;&amp;nbsp;&lt;/p&gt;
&lt;h3&gt;9. No clear roadmap for the implementation of the Bill&lt;/h3&gt;
&lt;p&gt;The 2018 Bill had specified a roadmap for the different provisions of the Bill to come into effect from the date of the Act being notified [14]. It specifically stated the time period within which the Authority had to be established and the subsequent rules and regulations notified. The present Bill does not specify any such blueprint; it does not provide any details on either when the Bill will be notified or the time period within which the Authority shall be established and specific rules and regulations notified. Considering that 25 provisions have been deferred to rules that have to be framed by the Central Government, and a further 19 provisions have been deferred to regulations to be notified by the Authority, the absence and/or delayed notification of such rules and regulations will impact the effective functioning of the Bill. The absence of any sunrise or sunset provision may disincentivise political or industrial will to support or enforce the provisions of the Bill. An example of such a lack of political will was the establishment of the Cyber Appellate Tribunal. The tribunal was established in 2006 to redress cyber fraud. However, it was virtually a defunct body from 2011 onwards, when the last chairperson retired. It was eventually merged with the Telecom Dispute Settlement and Appellate Tribunal in 2017.&lt;/p&gt;
&lt;p&gt;We recommend that the Bill clearly lay out a time period for the implementation of its different provisions, especially a time frame for the establishment of the Authority. This is important to give full and effective effect to the right of privacy of the individual. It is also important to ensure that individuals have an effective mechanism to enforce the right and seek recourse in case of any breach of obligations by the data fiduciaries.&lt;/p&gt;
&lt;p&gt;For offences, we suggest a system of mail boxing, where provisions and punishments are enforced in a staggered manner for a period until the fiduciaries are aligned with the provisions of the Act. The Authority must ensure that data principals and fiduciaries have sufficient awareness of the provisions of this Bill before the provisions for punishment are brought into force. This will allow the data fiduciaries to align their practices with the provisions of this new legislation, and the Authority will also have time to define and determine certain provisions that the Bill has left to the Authority to define. Additionally, penalties for offences must initially be enforced in a staggered process, combined with provisions such as warnings, in order to keep first-time and mistaken offenders from paying a high price. This will relieve the fear of smaller companies and startups, who might otherwise avoid processing data for fear of paying penalties for offences.&lt;/p&gt;
&lt;p&gt;&amp;nbsp;&amp;nbsp;&lt;/p&gt;
&lt;h3&gt;10. Lack of interoperability&lt;/h3&gt;
&lt;p&gt;In its current form, a number of the provisions in the Bill will make it difficult for India’s framework to be interoperable with other frameworks globally and in the region. For example, differences between the draft Bill and the GDPR can be found in the grounds for processing, data localization frameworks, the framework for cross border transfers, definitions of sensitive personal data, inclusion of the undefined category of ‘critical data’, and the roles of the authority and the central government.&lt;/p&gt;
&lt;p&gt;&amp;nbsp;&lt;/p&gt;
&lt;h3&gt;11. Legal Uncertainty&lt;/h3&gt;
&lt;p&gt;In its current structure, there are a number of provisions in the Bill that, when implemented, run the risk of creating an environment of legal uncertainty. These include: the lack of a definition of critical data, lack of clarity in the interpretation of the terms ‘harm’ and ‘significant harm’, the ability of the government to define further categories of sensitive personal data, the inclusion of requirements for ‘social media intermediaries’, the inclusion of ‘non-personal data’, the framing of the requirements for data transfers, the bar on processing of certain forms of biometric data as defined by the Central Government, the relationship between a consent manager and another data fiduciary, the inclusion of an AI sandbox, and the definition of state. To ensure the greatest protection of individual privacy rights and of personal data while also enabling innovation, it is important that any data protection framework be structured and drafted to provide as much legal certainty as possible.&lt;/p&gt;
&lt;p&gt;&amp;nbsp;&lt;/p&gt;
&lt;h3&gt;Endnotes&lt;/h3&gt;
&lt;p&gt;1. (2017) 10 SCC 641 (“Puttaswamy I”).&lt;/p&gt;
&lt;p&gt;2. Clause 42(1) of the 2018 Bill states that “Processing of personal data in the interests of the security of the State shall not be permitted unless it is authorised pursuant to a law, and is in accordance with the procedure established by such law, made by Parliament and is necessary for, and proportionate to such interests being achieved.”&lt;/p&gt;
&lt;p&gt;3. (2019) 1 SCC 1 (“Puttaswamy II”).&lt;/p&gt;
&lt;p&gt;4. Puttaswamy I, supra, para 180.&lt;/p&gt;
&lt;p&gt;5. (1978) 1 SCC 248.&lt;/p&gt;
&lt;p&gt;6. Ibid para 48.&lt;/p&gt;
&lt;p&gt;7. Puttaswamy I, supra, para 180.&lt;/p&gt;
&lt;p&gt;8. State of W.B. v. Anwar Ali Sarkar, 1952 SCR 284; Satwant Singh Sawhney v. A.P.O., AIR 1967 SC 1836.&lt;/p&gt;
&lt;p&gt;9. (2016) 7 SCC 353.&lt;/p&gt;
&lt;p&gt;10. Dvara Research “Initial Comments of Dvara Research dated 16 January 2020 on the Personal Data Protection Bill, 2019 introduced in Lok Sabha on 11 December 2019”, January 2020, https://www.dvara.com/blog/2020/01/17/our-initial-comments-on-the-personal-data-protection-bill-2019/ (“Dvara Research”).&lt;/p&gt;
&lt;p&gt;11. “A Data Sandbox for Your Company”, Terrific Data, last accessed on January 31, 2019, http://terrificdata.com/2016/12/02/3221/.&lt;/p&gt;
&lt;p&gt;12. Clause 3(20) — “harm” includes (i) bodily or mental injury; (ii) loss, distortion or theft of identity; (iii) financial loss or loss of property; (iv) loss of reputation or humiliation; (v) loss of employment; (vi) any discriminatory treatment; (vii) any subjection to blackmail or extortion; (viii) any denial or withdrawal of service, benefit or good resulting from an evaluative decision about the data principal; (ix) any restriction placed or suffered directly or indirectly on speech, movement or any other action arising out of a fear of being observed or surveilled; or (x) any observation or surveillance that is not reasonably expected by the data principal.&lt;/p&gt;
&lt;p&gt;13. Alex Hern, “Anonymised data can never be totally anonymous, says study”, The Guardian, July 23, 2019, https://www.theguardian.com/technology/2019/jul/23/anonymised-data-never-be-anonymous-enough-study-finds.&lt;/p&gt;
&lt;p&gt;14. Clause 97 of the 2018 Bill states: “(1) For the purposes of this Chapter, the term ‘notified date’ refers to the date notified by the Central Government under sub-section (3) of section 1. (2) The notified date shall be any date within twelve months from the date of enactment of this Act. (3) The following provisions shall come into force on the notified date: (a) Chapter X; (b) Section 107; and (c) Section 108. (4) The Central Government shall, no later than three months from the notified date, establish the Authority. (5) The Authority shall, no later than twelve months from the notified date, notify the grounds of processing of personal data in respect of the activities listed in sub-section (2) of section 17. (6) The Authority shall, no later than twelve months from the notified date, issue codes of practice on the following matters: (a) notice under section 8; (b) data quality under section 9; (c) storage limitation under section 10; (d) processing of personal data under Chapter III; (e) processing of sensitive personal data under Chapter IV; (f) security safeguards under section 31; (g) research purposes under section 45; (h) exercise of data principal rights under Chapter VI; (i) methods of de-identification and anonymisation; (j) transparency and accountability measures under Chapter VII. (7) Section 40 shall come into force on such date as is notified by the Central Government for the purpose of that section. (8) The remaining provisions of the Act shall come into force eighteen months from the notified date.”&lt;/p&gt;
&lt;p&gt;&amp;nbsp;&lt;/p&gt;

        &lt;p&gt;
        For more details visit &lt;a href='https://cis-india.org/internet-governance/blog/comments-to-the-personal-data-protection-bill-2019'&gt;https://cis-india.org/internet-governance/blog/comments-to-the-personal-data-protection-bill-2019&lt;/a&gt;
        &lt;/p&gt;
    </description>
    <dc:publisher>No publisher</dc:publisher>
    <dc:creator>Amber Sinha, Elonnai Hickok, Pallavi Bedi, Shweta Mohandas, Tanaya Rajwade</dc:creator>
    <dc:rights></dc:rights>

    
        <dc:subject>Internet Governance</dc:subject>
    
    
        <dc:subject>Data Protection</dc:subject>
    
    
        <dc:subject>Privacy</dc:subject>
    

   <dc:date>2020-02-21T10:13:35Z</dc:date>
   <dc:type>Blog Entry</dc:type>
   </item>


    <item rdf:about="https://cis-india.org/internet-governance/events/how-to-shutdown-internet-shutdowns">
    <title>How to Shut Down Internet Shutdowns</title>
    <link>https://cis-india.org/internet-governance/events/how-to-shutdown-internet-shutdowns</link>
    <description>
&lt;b&gt;This talk will focus on the challenges and opportunities for research on internet shutdowns after the judgement of the Supreme Court in Anuradha Bhasin v. Union of India. Stepping beyond the judgement, there will be a wider discussion on the practice of whitelists and the blocking powers of the central government.&lt;/b&gt;
        
&lt;p&gt;&amp;nbsp;&lt;/p&gt;
&lt;p&gt;&amp;nbsp;&lt;/p&gt;
&lt;h3&gt;&lt;strong&gt;About the Speaker&lt;/strong&gt;&amp;nbsp;&lt;/h3&gt;
&lt;p&gt;Apar Gupta is the Executive Director of the Internet Freedom Foundation.&lt;/p&gt;
&lt;p&gt;Apar has been fighting the good fight for digital rights. While in law school almost 20 years ago, he wrote a legal commentary on the IT Act that is now in its third edition. As a lawyer in the Supreme Court, he worked on landmark cases such as those on Section 66A, Intermediary Liability, Internet Shutdowns and the Right to Privacy.&lt;/p&gt;
&lt;p&gt;He also helped create public campaigns to advance net neutrality, reform defamation laws, fight Internet shutdowns and create a privacy statute. Apar previously ran his own successful law firm, was profiled in Outlook Magazine and listed in Forbes India's list of 30 under 30. He has also worked as a commercial litigator and partner in top law firms, written papers cited widely in local and international publications and taught courses at NLS and NLU.&lt;/p&gt;
&lt;p&gt;RSVP &lt;a class="external-link" href="https://forms.gle/CGei6wNUbR4t92549"&gt;here&lt;/a&gt;, or by sending an email to Torsha (torsha@cis-india.org).&lt;/p&gt;

        &lt;p&gt;
        For more details visit &lt;a href='https://cis-india.org/internet-governance/events/how-to-shutdown-internet-shutdowns'&gt;https://cis-india.org/internet-governance/events/how-to-shutdown-internet-shutdowns&lt;/a&gt;
        &lt;/p&gt;
    </description>
    <dc:publisher>No publisher</dc:publisher>
    <dc:creator>pranav</dc:creator>
    <dc:rights></dc:rights>

    
        <dc:subject>internet governance</dc:subject>
    
    
        <dc:subject>Event</dc:subject>
    
    
        <dc:subject>Internet Governance</dc:subject>
    

   <dc:date>2020-02-03T11:13:12Z</dc:date>
   <dc:type>Event</dc:type>
   </item>


    <item rdf:about="https://cis-india.org/internet-governance/automated-facial-recognition-systems-and-the-mosaic-theory-of-privacy-the-way-forward">
    <title>Automated Facial Recognition Systems and the Mosaic Theory of Privacy: The Way Forward</title>
    <link>https://cis-india.org/internet-governance/automated-facial-recognition-systems-and-the-mosaic-theory-of-privacy-the-way-forward</link>
    <description>
        &lt;b&gt; Arindrajit Basu and Siddharth Sonkar have co-written this blog as the third of their three-part blog series on AI Policy Exchange under the parent title: Is there a Reasonable Expectation of Privacy from Data Aggregation by Automated Facial Recognition Systems? &lt;/b&gt;
        
&lt;p&gt;&amp;nbsp;&lt;/p&gt;
&lt;p&gt;&lt;strong&gt;The Mosaic Theory of Privacy&lt;/strong&gt;&lt;/p&gt;
&lt;p&gt;Whether the data collected by the AFRS should be treated similarly to face photographs taken for the purposes of ABBA is not clear in the absence of judicial opinion. The AFRS would ordinarily collect significantly more data than facial photographs taken during authentication. This can be explained with the help of the &lt;em&gt;&lt;a href="https://www.lawfareblog.com/defense-mosaic-theory" rel="noreferrer noopener" target="_blank"&gt;mosaic theory of privacy&lt;/a&gt;&lt;/em&gt;.&lt;/p&gt;
&lt;p&gt;The mosaic theory of privacy suggests that data collected about an individual over long durations can be qualitatively different from single instances of observation. It argues that aggregating data from different instances can create a picture of an individual which affects her reasonable expectation of privacy. This is because a mere slice of information reveals far less than the same information contextualised within a broader pattern — a mosaic.&lt;/p&gt;
&lt;p&gt;The mosaic theory of privacy does not find explicit reference in 
Puttaswamy II. The petitioners had argued that seeding of Aadhaar data 
into existing databases would bridge information across silos so as to 
make real time surveillance possible. This is because information when 
integrated from different silos becomes more than the sum of its parts.&lt;/p&gt;
&lt;p&gt;The Court, however, dismissed this argument, accepting UIDAI’s 
submission that the data collected remains in different silos and 
merging is not permitted within the Aadhaar framework. Therefore, the 
Court did not examine whether it is constitutionally permissible to 
integrate data from different silos; it simply rejected the possibility 
of surveillance as a result of Aadhaar authentication.&lt;/p&gt;
&lt;p&gt;Jurisprudence in other jurisdictions is more advanced. In &lt;em&gt;United States v. Jones&lt;/em&gt;, the United States Supreme Court observed that the insertion of a global positioning system into Antoine Jones’ Jeep in the absence of a warrant and without his consent invaded his privacy, entitling him to Fourth Amendment protection. In this case, the movement of Jones’ vehicle was monitored for a period of twenty-eight days. Five concurring opinions in Jones acknowledge that aggregated and extensive surveillance is capable of violating the reasonable expectation of privacy irrespective of whether or not the surveillance has taken place in public.&lt;/p&gt;
&lt;p&gt;The Court distinguished between prolonged surveillance and short term
 surveillance. Surveillance in the short run does not reveal what a 
person repeatedly does, as opposed to sustained surveillance which can 
reveal significantly more about a person. The Court takes the example of
 how a sequence of trips to a bar, a bookie, a gym or a church can tell a
 lot more about a person than the story of any single visit viewed in 
isolation.&lt;/p&gt;
&lt;p&gt;Most recently, in &lt;a href="https://www.supremecourt.gov/opinions/17pdf/16-402_h315.pdf" rel="noreferrer noopener" target="_blank"&gt;&lt;em&gt;Carpenter v. United States&lt;/em&gt;&lt;/a&gt;, the Supreme Court of the United States held that the collection of historical cell data by the government exposes the physical movements of an individual to potential surveillance, and an individual holds a reasonable expectation of privacy against such collection. The Court admitted that historical cell-site information allows the government to go back in time in order to retrace the exact whereabouts of a person.&lt;/p&gt;
&lt;p&gt;Judicial decisions have not specifically addressed whether facial recognition by law enforcement constitutes a search under the Fourth Amendment or a “mere visual observation”.&lt;/p&gt;
&lt;p&gt;The common thread linking CCTV footage and cellular data is the unique ability to track the movement of an individual from one place to another, enabling extreme forms of surveillance. It is perhaps this crucial link that would make AFRS-enabled CCTVs prejudicial to individual privacy.&lt;/p&gt;
&lt;p&gt;The mosaic theory as understood in &lt;em&gt;Carpenter&lt;/em&gt; helps one understand the extent to which an AFRS can augment the capacities of law enforcement in India. This in turn can help in understanding whether it is constitutionally permissible to install such systems across the country.&lt;/p&gt;
&lt;p&gt;AFRS-enabled CCTV footage from different cameras, if viewed in conjunction, could reveal a sequence of movements of an individual, enabling long-term surveillance of a nature that is qualitatively distinct from isolated observations across unrelated CCTV footage.&lt;/p&gt;
&lt;p&gt;Subsequent to &lt;em&gt;Carpenter&lt;/em&gt;, &lt;a href="https://www.lawfareblog.com/four-months-later-how-are-courts-interpreting-carpenter" rel="noreferrer noopener" target="_blank"&gt;federal district courts&lt;/a&gt;
 in the United States have declined to apply Carpenter to video 
surveillance cases since the judgement did not “call into question 
conventional surveillance techniques and tools, such as security 
cameras.”&lt;/p&gt;
&lt;p&gt;The extent of processing that an AFRS-enabled CCTV exposes an individual to would be significantly greater. This is because every time an individual is in the zone of an AFRS-enabled CCTV, the facial image will be compared to a common database. Snippets from different CCTVs capturing the individual’s physical presence in two different locations may not be meaningful per se. When observed together, however, the AFRS will make it possible to identify the individual’s movement from one place to another.&lt;/p&gt;
&lt;p&gt;For instance, the AFRS will be able to identify the person when they are on Street A at a particular time and when they are on Street B in the immediately subsequent hour, as recorded by the respective CCTV cameras, indicating the person’s physical movement from A to B. While a CCTV camera only records the movement of an individual in video format, an AFRS translates that digital information into individualised data by comparing facial features with a pre-existing database.&lt;/p&gt;
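&lt;p&gt;To make this aggregation step concrete, the short sketch below (in Python) joins hypothetical per-camera sightings on a matched identity and sorts them by time, turning individually uninformative data points into a movement trail. Every name and value in it is invented for illustration; no real AFRS, database or API is assumed.&lt;/p&gt;
&lt;pre&gt;
# Illustrative sketch only: aggregating per-camera face matches into a trail.
# Each sighting alone reveals little; joined on identity and sorted by time,
# the sightings reconstruct a person's path (the "mosaic"). All data invented.
from collections import defaultdict

sightings = [
    {"camera": "Street B", "time": "2020-01-01T11:05", "identity": "person_42"},
    {"camera": "Street A", "time": "2020-01-01T10:00", "identity": "person_42"},
    {"camera": "Street A", "time": "2020-01-01T10:02", "identity": "person_17"},
]

trails = defaultdict(list)
for s in sightings:
    trails[s["identity"]].append((s["time"], s["camera"]))

for identity, trail in trails.items():
    # ISO-8601 timestamps sort chronologically as plain strings.
    path = " to ".join(camera for _, camera in sorted(trail))
    print(identity, path)  # e.g. person_42: Street A to Street B
&lt;/pre&gt;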
&lt;p&gt;Through data aggregation, which appears to be the aim of the Indian government in its tender linking three databases, it is apparent that the right to privacy is in danger. Yet, at present, there is no case law or legislation that can render such efforts illegal.&lt;/p&gt;
&lt;p&gt;&lt;strong&gt;Conclusions and The Way Forward&lt;/strong&gt;&lt;/p&gt;
&lt;p&gt;Despite a lack of judicial recognition of the potential unconstitutionality of deploying AFRS, it is clear that the introduction of these systems poses a clear and present danger to civil rights and human dignity. Algorithmic surveillance alters a human being’s life in ways that even the subject of this surveillance cannot fully comprehend. As an individual’s data is manipulated and aggregated to derive a pattern about that individual’s world, the individual and their data no longer exist for themselves but are massaged into various categories.&lt;/p&gt;
&lt;p&gt;Louise Amoore terms this a ‘&lt;a href="https://journals.sagepub.com/doi/abs/10.1177/0263276411417430?journalCode=tcsa" rel="noreferrer noopener" target="_blank"&gt;data-derivative&lt;/a&gt;’: an abstract conglomeration of data that continuously shapes our futures without us having a say in its framing. Branding an individual as a criminal and then aggregating their data causes emotional distress, as individuals move about in fear of the state gaze and of their association with activities branded as potentially dangerous — thereby suppressing a right to dissent — as exemplified by the reported use of these systems during the recent protests in Hong Kong.&lt;/p&gt;
&lt;p&gt;Case law both in India and abroad has clearly suggested that a right 
to privacy is contextual and is not surrendered merely because an 
individual is in a public place. However, the jurisprudence protecting 
public photography or videography under the umbrella of privacy remains 
less clear globally and non-existent in India.&lt;/p&gt;
&lt;p&gt;The mosaic theory of privacy is useful in this regard, as it guards against mass ‘data-veillance’ of individual behaviour and accurately identifies the unique power that the volume, velocity and variety of Big Data provide to the state. It is therefore imperative that the judiciary recognise safeguards from data aggregation as an essential component of a reasonable expectation of privacy. At the same time, legislation could also provide the required safeguards.&lt;/p&gt;
&lt;p&gt;In the US, Senators Coons and Lee recently introduced a draft Bill titled ‘&lt;a href="https://www.coons.senate.gov/imo/media/doc/ALB19A70.pdf" rel="noreferrer noopener" target="_blank"&gt;The Facial Recognition Technology Warrant Act of 2019&lt;/a&gt;’. The Bill aims to impose reasonable restrictions on the use of facial recognition technology by law enforcement. It creates safeguards against sustained tracking of the physical movements of an individual in public spaces. The Bill terms such tracking ‘ongoing surveillance’ when it occurs for a period of over 72 hours, whether in real time or through the application of technology to historical records. The Bill requires that ongoing surveillance only be conducted for law enforcement purposes &lt;em&gt;and&lt;/em&gt; in pursuance of a court order (unless it is impractical to obtain one).&lt;/p&gt;
&lt;p&gt;While the Bill has its textual problems, it is definitely worth considering as a model going forward to ensure that AFR systems are deployed in line with a rights-respecting reading of a reasonable expectation of privacy. &lt;a href="http://datagovernance.org/report/adoption-and-regulation-of-facial-recognition-technologies-in-india" rel="noreferrer noopener" target="_blank"&gt;Parsheera&lt;/a&gt; suggests that legislation should provide for narrow tailoring of the objects and purposes for deployment of AFRS, restrictions on the persons whose images may be scanned from the databases, judicial approval for its use on a case by case basis, and effective mechanisms of oversight, analysis and verification.&lt;/p&gt;
&lt;p&gt;Appropriate legal intervention is crucial. A failure to implement 
this effectively jeopardizes the expression of our true selves and the 
core tenets of our democracy.&lt;/p&gt;

        &lt;p&gt;
        For more details visit &lt;a href='https://cis-india.org/internet-governance/automated-facial-recognition-systems-and-the-mosaic-theory-of-privacy-the-way-forward'&gt;https://cis-india.org/internet-governance/automated-facial-recognition-systems-and-the-mosaic-theory-of-privacy-the-way-forward&lt;/a&gt;
        &lt;/p&gt;
    </description>
    <dc:publisher>No publisher</dc:publisher>
    <dc:creator>Arindrajit Basu, Siddharth Sonkar</dc:creator>
    <dc:rights></dc:rights>

    
        <dc:subject>Cybersecurity</dc:subject>
    
    
        <dc:subject>Cyber Security</dc:subject>
    
    
        <dc:subject>internet governance</dc:subject>
    
    
        <dc:subject>Internet Governance</dc:subject>
    

   <dc:date>2020-01-02T14:12:38Z</dc:date>
   <dc:type>Blog Entry</dc:type>
   </item>


    <item rdf:about="https://cis-india.org/internet-governance/automated-facial-recognition-systems-afrs-responding-to-related-privacy-concerns">
    <title>Automated Facial Recognition Systems (AFRS): Responding to Related Privacy Concerns</title>
    <link>https://cis-india.org/internet-governance/automated-facial-recognition-systems-afrs-responding-to-related-privacy-concerns</link>
    <description>
        &lt;b&gt;Arindrajit Basu and Siddharth Sonkar have co-written this blog as the second of their three-part blog series on AI Policy Exchange under the parent title: Is there a Reasonable Expectation of Privacy from Data Aggregation by Automated Facial Recognition Systems? &lt;/b&gt;
        
&lt;p&gt;&amp;nbsp;&lt;/p&gt;
&lt;p&gt;&amp;nbsp;&lt;/p&gt;
&lt;p&gt;The Supreme Court of India, in &lt;a href="https://indiankanoon.org/doc/91938676/"&gt;Puttaswamy I&lt;/a&gt;, recognized that the right to privacy is not surrendered merely because the individual is in a public place. Privacy is linked to the individual, as it is an essential facet of human dignity. Justice Chelameswar further clarified that privacy is contextual. Even in a public setting, people trying to converse in whispers would signal a claim to the right to privacy. Speaking on a loudspeaker would naturally not signal the same claim.&lt;/p&gt;
&lt;p&gt;The Supreme Court of Canada has also affirmed the notion of contextual privacy. As recently as 7 March 2019, the Supreme Court of Canada, &lt;a href="http://www.thecourt.ca/r-v-jarvis-carving-out-a-contextual-approach-to-privacy/" rel="noreferrer noopener" target="_blank"&gt;in a landmark decision&lt;/a&gt;, defined privacy rights in public areas, implicitly applying &lt;a href="https://crypto.stanford.edu/portia/papers/RevnissenbaumDTP31.pdf"&gt;Helen Nissenbaum’s theory of contextual integrity&lt;/a&gt;. Nissenbaum explains the extent to which the right to privacy is eroded in public spaces with the help of her theory of contextual integrity.&lt;/p&gt;
&lt;p&gt;Nissenbaum suggests that labelling information as exclusively public 
or private fails to take into account the context which rationalises the
 desire of the individual to exercise her privacy in public. To explain 
this with an illustration, there exists a reasonable expectation of 
privacy in the restroom of a restaurant, even though it is in a public 
space.&lt;/p&gt;
&lt;p&gt;In &lt;a href="http://www.thecourt.ca/r-v-jarvis-carving-out-a-contextual-approach-to-privacy/"&gt;&lt;em&gt;R v Jarvis&lt;/em&gt;&lt;/a&gt; (Jarvis), the Court overruled a Court of Appeal for Ontario &lt;a href="https://www.canlii.org/en/on/onca/doc/2017/2017onca778/2017onca778.pdf"&gt;decision&lt;/a&gt;
 to hold that people can have a reasonable expectation of privacy even 
in public spaces. In this case, Jarvis was charged with the offence of 
voyeurism for secretly recording his students. The primary issue that 
the&amp;nbsp; Supreme Court of Canada was concerned with was whether the students
 filmed by Mr. Jarvis enjoyed a reasonable expectation of privacy at 
their school.&lt;/p&gt;
&lt;p&gt;The Court in this case unanimously held that the students did indeed have a reasonable expectation of privacy. The Court identified nine contextual factors relevant to determining whether a person would have a reasonable expectation of privacy. The listed factors were:&lt;/p&gt;
&lt;p&gt;“1. The location the person was in when he or she was observed or recorded,&lt;/p&gt;
&lt;p&gt;2. The nature of the impugned conduct (whether it consisted of observation or recording),&lt;/p&gt;
&lt;p&gt;3. Awareness of or consent to potential observation or recording,&lt;/p&gt;
&lt;p&gt;4. The manner in which the observation or recording was done,&lt;/p&gt;
&lt;p&gt;5. The subject matter or content of the observation or recording,&lt;/p&gt;
&lt;p&gt;6. Any rules, regulations or policies that governed the observation or recording in question,&lt;/p&gt;
&lt;p&gt;7. The relationship between the person who was observed or recorded and the person who did the observing or recording,&lt;/p&gt;
&lt;p&gt;8. The purpose for which the observation or recording was done, and&lt;/p&gt;
&lt;p&gt;9. The personal attributes of the person who was observed or recorded.” (paragraph 29 of the judgement).&lt;/p&gt;
&lt;p&gt;The Court emphasized that the factors are not an exhaustive list, but
 rather were meant to be a guiding tool in determining whether a 
reasonable expectation of privacy existed in a given context. It is not 
necessary that each of these factors is present in a given situation to 
give rise to an expectation of privacy.&lt;/p&gt;
&lt;p&gt;Compared to the above-mentioned factors in Jarvis, the Indian Supreme Court in &lt;a href="https://indiankanoon.org/doc/127517806/"&gt;Justice K.S Puttaswamy (Retd.) v. Union of India&lt;/a&gt;: Justice Sikri (Puttaswamy II) — the case which upheld the constitutionality of the Aadhaar project — relied on the following factors to determine a reasonable expectation of privacy in a given context:&lt;/p&gt;
&lt;p&gt;“(i) What is the context in which a privacy claim is set up?&lt;/p&gt;
&lt;p&gt;(ii) Does the claim relate to private or family life, or a confidential relationship?&lt;/p&gt;
&lt;p&gt;(iii) Is the claim a serious one or is it trivial?&lt;/p&gt;
&lt;p&gt;(iv) Is the disclosure likely to result in any serious or significant injury and the nature and extent of disclosure?&lt;/p&gt;
&lt;p&gt;(v) Is disclosure relates to personal and sensitive information of an identified person?&lt;/p&gt;
&lt;p&gt;(vi) Does disclosure relate to information already disclosed publicly? If so, its implication?”&lt;/p&gt;
&lt;p&gt;These factors (acknowledged in Puttaswamy II in paragraph 292) seem to be very similar to the ones laid down in Jarvis, i.e., there is a strong reliance on context in both cases. While there is no explicit mention of the personal attributes of the individual claiming a reasonable expectation, the holding that children should be given an opt-out indicates that the Court implicitly takes personal attributes (e.g. age) into account as well.&lt;/p&gt;
&lt;p&gt;The Court in Jarvis further (in paragraph 39) took the example of a 
woman in a communal change room at a public pool. She may expect other 
users to incidentally observe her undress but she would continue to 
expect only other women in the change room to observe her and reserve 
her rights against the general public. She would also expect not to be 
video recorded or photographed while undressing, both from other users 
of the pool and by the general public.&amp;nbsp;&lt;/p&gt;
&lt;p&gt;If it is later found out that the change room had a one-way glass which allowed the pool staff to view the users change, or that there was a concealed camera recording persons while they were changing, she could claim a breach of her reasonable expectation of privacy under such circumstances, and it would constitute an invasion of privacy.&lt;/p&gt;
&lt;p&gt;&lt;strong&gt;So, in the context of an AFRS, an individual walking down a public road may still signal that they wish to avail of their right to privacy. In such contexts, a concerted surveillance mechanism may come up against constitutional roadblocks.&lt;/strong&gt;&lt;/p&gt;
&lt;p&gt;&lt;strong&gt;What is the nature of information being collected?&lt;/strong&gt;&lt;/p&gt;
&lt;p&gt;The second big question — the nature of the information being collected — plays a role in determining the extent to which a person can exercise their reasonable expectation of privacy. Puttaswamy II laid down that the collection of core biometric information such as fingerprints and iris scans in the context of Aadhaar-Based Biometric Authentication (‘ABBA’) is constitutionally permissible. The basis of this conclusion is that the Aadhaar Act does not deal with the individual’s intimate or private sphere.&lt;/p&gt;
&lt;p&gt;The judgement of the Supreme Court in Puttaswamy II is in a very specific context (i.e. the ABBA). It does not explain or identify the contextual factors which determine the extent to which privacy may be reasonably expected over biometrics generally. In this judgment, the Court observed that demographic information and photographs do not raise a reasonable expectation of privacy under Article 21 unless there exist special circumstances, such as the disclosure of the identity of a juvenile in conflict with law or of a rape victim.&lt;/p&gt;
&lt;p&gt;&lt;strong&gt;Most importantly, the Court held that face photographs for 
the purpose of identification are not covered by a reasonable 
expectation of privacy. The Court distinguished face photographs from 
intimate photographs or those photographs which concern confidential 
situations. &lt;/strong&gt;&lt;/p&gt;
&lt;p&gt;&lt;strong&gt;Face photographs, according to the Court, are shared by individuals in the ordinary course of conduct for the purpose of obtaining a driving license, voter ID, passport, examination admit cards, employment cards, and so on. Face photographs by themselves reveal no information.&lt;/strong&gt;&lt;/p&gt;
&lt;p&gt;Naturally, this&amp;nbsp;pronouncement of the Apex Court is a huge boost for the introduction of AFRS in India.&lt;/p&gt;
&lt;p&gt;Abroad, however, on 4 September 2019, in &lt;a href="https://www.judiciary.uk/wp-content/uploads/2019/09/bridges-swp-judgment-Final03-09-19-1.pdf"&gt;Edward Bridges v. Chief Constable of South Wales Police&lt;/a&gt;, a Division Bench of the High Court of England and Wales heard a challenge against an AFRS introduced by law enforcement (&lt;em&gt;see&lt;/em&gt; Endnote 1). The High Court rejected a claim for judicial review, holding that the AFRS in question does not violate, inter alia, the right to privacy under Article 8 of the European Convention on Human Rights (‘ECHR’).&lt;/p&gt;
&lt;p&gt;According to the Court, the AFRS was used for specific and limited purposes, i.e., only when the image of a member of the public matched a person on an existing watchlist. The use of the AFRS was therefore considered a lawful and fair restriction.&lt;/p&gt;
&lt;p&gt;The Court, however, acknowledged that extracting biometric data 
through AFRS is “well beyond the expected and unsurprising”. This seems 
to be a departure from the Indian Supreme Court’s observation in 
Puttaswamy II that there is no reasonable expectation of privacy over 
biometric data in the context of ABBA, and may be a wiser approach for 
the Indian courts to adopt.&lt;/p&gt;
&lt;h6&gt;&lt;strong&gt;Endnote &lt;/strong&gt;&lt;/h6&gt;
&lt;p&gt;1. The challenge was put forth by Edward Bridges, a civil liberties campaigner from Cardiff, after he was caught on camera in two particular deployments of the AFRS: a) when he was at Queen Street, a busy shopping area in Cardiff, and b) when he was at the Defence Procurement, Research, Technology and Exportability Exhibition held at the Motorpoint Arena.&lt;/p&gt;
&lt;p&gt;&amp;nbsp;&lt;/p&gt;
&lt;p&gt;This was published by &lt;a class="external-link" href="https://aipolicyexchange.org/2019/12/28/automated-facial-recognition-systems-afrs-responding-to-related-privacy-concerns/"&gt;AI Policy Exchange&lt;/a&gt;.&lt;/p&gt;

        &lt;p&gt;
        For more details visit &lt;a href='https://cis-india.org/internet-governance/automated-facial-recognition-systems-afrs-responding-to-related-privacy-concerns'&gt;https://cis-india.org/internet-governance/automated-facial-recognition-systems-afrs-responding-to-related-privacy-concerns&lt;/a&gt;
        &lt;/p&gt;
    </description>
    <dc:publisher>No publisher</dc:publisher>
    <dc:creator>Arindrajit Basu, Siddharth Sonkar</dc:creator>
    <dc:rights></dc:rights>

    
        <dc:subject>Cybersecurity</dc:subject>
    
    
        <dc:subject>Cyber Security</dc:subject>
    
    
        <dc:subject>internet governance</dc:subject>
    
    
        <dc:subject>Internet Governance</dc:subject>
    

   <dc:date>2020-01-02T14:09:14Z</dc:date>
   <dc:type>Blog Entry</dc:type>
   </item>


    <item rdf:about="https://cis-india.org/internet-governance/decrypting-automated-facial-recognition-systems-afrs-and-delineating-related-privacy-concerns">
    <title>Decrypting Automated Facial Recognition Systems (AFRS) and Delineating Related Privacy Concerns</title>
    <link>https://cis-india.org/internet-governance/decrypting-automated-facial-recognition-systems-afrs-and-delineating-related-privacy-concerns</link>
    <description>
        &lt;b&gt;Arindrajit Basu and Siddharth Sonkar have co-written this blog as the first of their three-part blog series on AI Policy Exchange under the parent title: Is there a Reasonable Expectation of Privacy from Data Aggregation by Automated Facial Recognition Systems?&lt;/b&gt;
        
&lt;p&gt;&amp;nbsp;&lt;/p&gt;
&lt;p&gt;&amp;nbsp;&lt;/p&gt;
&lt;p&gt;The use of aggregated Big Data by governments has the potential to exacerbate power asymmetries and erode civil liberties like few technologies of the past. In order to guard against the aggressive aggregation and manipulation of the data generated by individuals who are branded as suspect, it is critical that our firmly established constitutional rights protect human dignity in the face of this potential erosion.&lt;/p&gt;
&lt;p&gt;The increasing ubiquity of Automated Facial Recognition Systems (AFRS) serves as a prime example of the rising desire of governments to push fundamental rights to the brink. With AFRS, the core fundamental right in question is privacy, although questions have been posed regarding the potential violation of other related rights, such as the Right to Equality and the Right to Free Speech and Expression, as well.&lt;/p&gt;
&lt;p&gt;There is a rich corpus of literature (see &lt;a href="https://indianexpress.com/article/opinion/columns/digital-identification-facial-recognition-system-ncrb-5859072/" rel="noreferrer noopener" target="_blank"&gt;here&lt;/a&gt;, &lt;a href="http://www.unswlawjournal.unsw.edu.au/wp-content/uploads/2017/09/40-1-11.pdf" rel="noreferrer noopener" target="_blank"&gt;here&lt;/a&gt; and an excellent recent paper by Smriti Parsheera &lt;a href="http://datagovernance.org/report/adoption-and-regulation-of-facial-recognition-technologies-in-india" rel="noreferrer noopener" target="_blank"&gt;here&lt;/a&gt;) from a diverse coterie of scholars calling out the challenges posed by AFRS, particularly with respect to its proportionality as a restriction on the right to privacy. Our contribution to this discourse focuses on a very specific question around a ‘reasonable expectation of privacy’ — the standard identified for the protection of privacy in public spaces across jurisdictions, including in India. This is because at this juncture, the precise nature of the AFRS which will eventually be used and the regulations it will be subject to are not clear.&lt;/p&gt;
&lt;p&gt;In &lt;a href="https://indiankanoon.org/doc/91938676/'"&gt;Retd. K.S &lt;/a&gt;&lt;a href="https://indiankanoon.org/doc/91938676/" rel="noreferrer noopener" target="_blank"&gt;Puttaswamy (Retd.) v. Union of India&lt;/a&gt;:
 Justice Chandrachud (Puttaswamy I), the Indian Supreme Court was 
concerned with the question whether there exists a fundamental right to 
privacy under the Indian Constitution. A nine-judge bench of the Court 
recognized that the right to privacy is a fundamental right implicit 
inter alia in the right to life within Article 21 of the Constitution.&lt;/p&gt;
&lt;p&gt;The right to privacy protects people and not places. Every person is 
entitled, however, to a reasonable expectation of privacy. The 
expectation of privacy must be twofold. First, the person must prove 
that the alleged act could inflict some harm. Such harm must be real and
 not be speculative or imaginary. Second, society must recognize this 
expectation as reasonable. The test of reasonable expectations is 
contextual, i.e., the extent to which it safeguards privacy depends on 
the place at which the individual is.&lt;/p&gt;
&lt;p&gt;In order to pass any constitutional test, therefore, AFRS must 
satisfy the ‘reasonable expectation’ test articulated in Puttaswamy. 
However, in this context, the test itself has multiple contours. Do we 
have a right to privacy in a public place? Is AFRS collecting any data 
that specifically violates a right to privacy? Is the aggregation of 
that data a potential violation?&lt;/p&gt;
&lt;p&gt;After providing a brief introduction to the use cases of AFRS in 
India and across the world, we embark upon answering all these 
questions.&lt;/p&gt;
&lt;p&gt;&lt;strong&gt;Primer on Automated Facial Recognition Systems (AFRS)&lt;/strong&gt;&lt;/p&gt;
&lt;p&gt;Facial recognition is a biometric technology that utilises cameras to match stored or live footage of individuals (including both stills and moving footage) with images or video from an existing database. Some systems might also be used to analyze broader demographic trends or conduct sentiment analysis through crowd scanning.&lt;/p&gt;
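&lt;p&gt;As a rough illustration of the matching step described above, the sketch below (in Python) compares a probe face embedding against a database of enrolled embeddings using cosine similarity and a threshold. The embeddings, the database and the threshold value are all hypothetical placeholders, not any real system’s API; this is a minimal sketch of the general technique, not how any deployed AFRS actually works.&lt;/p&gt;
&lt;pre&gt;
# Minimal sketch of embedding-based face matching (illustrative only).
# In a real system a neural network would map a face image to an embedding;
# here the embeddings and database entries are hypothetical placeholders.
import math

def cosine_similarity(a, b):
    dot = sum(x * y for x, y in zip(a, b))
    norm_a = math.sqrt(sum(x * x for x in a))
    norm_b = math.sqrt(sum(x * x for x in b))
    return dot / (norm_a * norm_b)

def match_face(probe, database, threshold=0.8):
    """Return the best-matching enrolled identity, or None if below threshold."""
    best_id, best_score = None, threshold
    for identity, enrolled in database.items():
        score = cosine_similarity(probe, enrolled)
        if score &gt;= best_score:
            best_id, best_score = identity, score
    return best_id

# Hypothetical usage: tiny 3-dimensional embeddings for illustration.
database = {"person_42": [0.9, 0.1, 0.4], "person_17": [0.1, 0.8, 0.2]}
print(match_face([0.88, 0.15, 0.38], database))  # likely "person_42"
&lt;/pre&gt;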
&lt;p&gt;While the use of photographs and video footage has long been a core component of police investigation, the use of algorithms to process vast tracts of Big Data (characterized by ‘Volume, Velocity, and Variety’) and compare disparate and discrete data points allows for the derivation of hitherto unfeasible insights on the subjects of Big Data.&lt;/p&gt;
&lt;p&gt;The utilisation of AFRS for law enforcement is rapidly spreading around the world. &lt;a href="https://carnegieendowment.org/2019/09/17/global-expansion-of-ai-surveillance-pub-79847" rel="noreferrer noopener" target="_blank"&gt;A Global AI Surveillance Index&lt;/a&gt;
 compiled by the Carnegie Endowment for International Peace found that 
at least sixty-four countries are incorporating facial recognition 
systems into their AI surveillance programs.&lt;/p&gt;
&lt;p&gt;Chinese technology company Yitu has entered into a partnership with 
security forces in Malaysia to equip police officers with facial 
recognition body cameras that, powered by enabling technologies, would 
allow a comparison of images caught by the live body cameras with images
 from several central databases.&lt;/p&gt;
&lt;p&gt;In &lt;a href="https://news.sky.com/story/met-polices-facial-recognition-tech-has-81-error-rate-independent-report-says-11755941" rel="noreferrer noopener" target="_blank"&gt;England and Wales&lt;/a&gt;,
 London Metropolitan Police, South Wales Police, and Leicestershire 
Police are all in the process of developing technologies that allow for 
the identification and comparison of live images with those stored in a 
database.&lt;/p&gt;
&lt;p&gt;The technology is being developed by Japanese firm NEC and the police
 force has limited ability to oversee or modify the software, given its 
proprietary nature. The Deputy Chief of South Wales Police stated that 
“the tech is given to [them] as a sealed box… [and the police force 
themselves] have no input – whatever it does, it does what it does.”&lt;/p&gt;
&lt;p&gt;In the US, &lt;a href="https://www.americanbar.org/groups/criminal_justice/publications/criminal-justice-magazine/2019/spring/facial-recognition-technology/" rel="noreferrer noopener" target="_blank"&gt;Baltimore’s police&lt;/a&gt;
 set up facial recognition cameras to track and arrest protestors — a 
system that reached its zenith during the 2018 riots in the city.&amp;nbsp;&lt;/p&gt;
&lt;p&gt;It is suspected that authorities in &lt;a href="https://www.japantimes.co.jp/news/2019/10/23/asia-pacific/hong-kong-protests-ai-facial-recognition-tech/#.Xf1Fs_zhVPY" rel="noreferrer noopener" target="_blank"&gt;Hong Kong&lt;/a&gt; are also using AFRS to clamp down on the ongoing pro-democracy protests.&lt;/p&gt;
&lt;p&gt;In India, the Ministry of Home Affairs, through the National Crime Records Bureau, put out a &lt;a href="http://ncrb.gov.in/TENDERS/AFRS/RFP_NAFRS.pdf" rel="noreferrer noopener" target="_blank"&gt;tender for a new AFRS&lt;/a&gt;, whose stated objective is to “act as a foundation for national level searchable platform of facial images.” The AFRS will pull facial image data from CCTV feeds and compare these with existing records across databases including the Crime and Criminal Tracking Networks and Systems (CCTNS), the Inter-operable Criminal Justice System (ICJS), Immigration Visa Foreigner Registration Tracking (IVFRT), Passport, Prisons, and state police records.&lt;/p&gt;
&lt;p&gt;Plans are also afoot to integrate this with the yet to be deployed 
National Automated Fingerprint Identification System (NAFIS), thereby 
creating a multi-faceted surveillance system.&lt;/p&gt;
&lt;p&gt;Despite raising eyebrows due to its potentially all-pervasive scope, this tender is not the first instance of AFRS being used by Indian authorities. Punjab Police, &lt;a href="https://www.livemint.com/AI/DIh6fmR6croUJps6x7JW5K/Meet-Staqu-a-startup-helping-Indian-law-enforcement-agencie.html" rel="noreferrer noopener" target="_blank"&gt;in partnership with Gurugram-based start-up Staqu&lt;/a&gt;, has launched and commenced implementation of the Punjab Artificial Intelligence System (PAIS), which uses digitised criminal records and automated facial recognition to retrieve information on a suspected criminal and essentially track their public whereabouts, a practice that poses potential constitutional questions.&lt;/p&gt;
&lt;p&gt;&amp;nbsp;&lt;/p&gt;
&lt;p&gt;This was published by &lt;a class="external-link" href="https://aipolicyexchange.org/2019/12/26/decrypting-automated-facial-recognition-systems-afrs-and-delineating-related-privacy-concerns/"&gt;AI Policy Exchange&lt;/a&gt;.&lt;/p&gt;

        &lt;p&gt;
        For more details visit &lt;a href='https://cis-india.org/internet-governance/decrypting-automated-facial-recognition-systems-afrs-and-delineating-related-privacy-concerns'&gt;https://cis-india.org/internet-governance/decrypting-automated-facial-recognition-systems-afrs-and-delineating-related-privacy-concerns&lt;/a&gt;
        &lt;/p&gt;
    </description>
    <dc:publisher>No publisher</dc:publisher>
    <dc:creator>Arindrajit Basu, Siddharth Sonkar</dc:creator>
    <dc:rights></dc:rights>

    
        <dc:subject>Cybersecurity</dc:subject>
    
    
        <dc:subject>Cyber Security</dc:subject>
    
    
        <dc:subject>internet governance</dc:subject>
    
    
        <dc:subject>Internet Governance</dc:subject>
    

   <dc:date>2020-01-02T14:01:48Z</dc:date>
   <dc:type>Blog Entry</dc:type>
   </item>


    <item rdf:about="https://cis-india.org/internet-governance/extra-territorial-surveillance-and-the-incapacitation-of-human-rights">
    <title>Extra-Territorial Surveillance and the Incapacitation of Human Rights</title>
    <link>https://cis-india.org/internet-governance/extra-territorial-surveillance-and-the-incapacitation-of-human-rights</link>
    <description>
        &lt;b&gt;This paper was published in Volume 12 (2) of the NUJS Law Review. &lt;/b&gt;
        
&lt;div&gt;&amp;nbsp;&lt;/div&gt;
&lt;div&gt;Our networked data trails dictate, define, and modulate societies in hitherto inconceivable ways. The ability to access and manipulate that data is a product of stark power asymmetry in geo-politics, leading to a dynamic that privileges the interests of a few over the right to privacy and dignity of the many. I argue that the persistent de facto violation of human rights norms through extraterritorial surveillance conducted by western intelligence agencies, compounded by the failure of judicial intervention in the West, has led to the incapacitation of international human rights law. Despite robust jurisprudence including case law, comments by the United Nations, and widespread state practice on the right to privacy and the application of human rights obligations to extraterritorial stakeholders, extraterritorial surveillance continues with aplomb. Procedural safeguards and proportionality tests regularly sway towards a ‘ritual incantation’ of national security even in scenarios where a less intrusive option is available. The vulnerable citizen abroad is unable to challenge these processes and becomes an unwitting victim of nefarious surveillance practices that further widen global power asymmetry and entrench geo-political fissures.&lt;/div&gt;
&lt;div&gt;&lt;br /&gt;The full article can be found &lt;a href="https://cis-india.org/internet-governance/extraterritorial-algorithmic-surveillance-and-the-incapacitation-of-international-human-rights-law" class="internal-link" title="EXTRATERRITORIAL ALGORITHMIC SURVEILLANCE AND THE INCAPACITATION OF INTERNATIONAL HUMAN RIGHTS LAW"&gt;here&lt;/a&gt;.&lt;/div&gt;

        &lt;p&gt;
        For more details visit &lt;a href='https://cis-india.org/internet-governance/extra-territorial-surveillance-and-the-incapacitation-of-human-rights'&gt;https://cis-india.org/internet-governance/extra-territorial-surveillance-and-the-incapacitation-of-human-rights&lt;/a&gt;
        &lt;/p&gt;
    </description>
    <dc:publisher>No publisher</dc:publisher>
    <dc:creator>Arindrajit Basu</dc:creator>
    <dc:rights></dc:rights>

    
        <dc:subject>Cybersecurity</dc:subject>
    
    
        <dc:subject>Cyber Security</dc:subject>
    
    
        <dc:subject>Internet Governance</dc:subject>
    

   <dc:date>2020-01-02T11:02:26Z</dc:date>
   <dc:type>Blog Entry</dc:type>
   </item>


    <item rdf:about="https://cis-india.org/internet-governance/blog/article-19-akriti-bopanna-and-ephraim-percy-kenyanito-december-16-2019-icann-takes-one-step-forward-in-its-human-rights-and-accountability-commitments">
    <title>ICANN takes one step forward in its human rights and accountability commitments</title>
    <link>https://cis-india.org/internet-governance/blog/article-19-akriti-bopanna-and-ephraim-percy-kenyanito-december-16-2019-icann-takes-one-step-forward-in-its-human-rights-and-accountability-commitments</link>
    <description>
        &lt;b&gt;Akriti Bopanna and Ephraim Percy Kenyanito take a look at ICANN's Implementation Assessment Report for the Workstream 2 recommendations and break down the key human rights considerations in it. Akriti chairs the Cross Community Working Party on Human Rights at ICANN and Ephraim works on Human Rights and Business for Article 19, leading their ICANN engagement.&lt;/b&gt;
        
&lt;p style="text-align: justify;"&gt;The article was first&lt;a class="external-link" href="https://www.article19.org/resources/blog-icann-takes-one-step-forward-in-its-human-rights-and-accountability-commitments/"&gt; published on Article 19&lt;/a&gt; on December 16, 2019&lt;/p&gt;
&lt;hr style="text-align: justify;" /&gt;
&lt;p style="text-align: justify;"&gt;ICANN is the international non-profit organization that brings together various stakeholders to create policies aimed at coordinating the Domain Name System. Some of these stakeholders include representatives from government, civil society, academia, the private sector, and the technical community.&lt;/p&gt;
&lt;p style="text-align: justify;"&gt;During the recently concluded 66th International Meeting of the Internet Corporation for Assigned Names and Numbers (ICANN) in Montreal (Canada); the ICANN board adopted by consensus the recommendations contained within the Work Stream 2 (WS2) Final Report. This report was generated as part of steps towards accountability after the September 30th 2016 U.S. government handing over of its unilateral control over ICANN, through its previous stewardship role of the Internet Assigned Names and Numbers Authority (IANA).&lt;/p&gt;
&lt;p style="text-align: justify;"&gt;The Workstream 2 Recommendations on Accountability are seen as a big step ahead in the incorporation of human rights in ICANN’s various processes, with over 100 recommendations on aspects ranging from diversity to transparency. &amp;nbsp;An Implementation Team has been constituted which comprises the Co-chairs and the rapporteurs from the WS2 subgroups. They will primarily help the ICANN organization in interpreting recommendations of the groups where further clarification is needed on how to implement the same. As the next step, an Implementation Assessment Report has recently been published which looks at the various resources and steps needed. The steps are categorized into actions meant for one of the 3; the ICANN Board, Community and the ICANN organization itself. These will be funded by ICANN’s General Operating Fund, the Board and the org.&lt;/p&gt;
&lt;p style="text-align: justify;"&gt;The report is divided into the following 8 issues: 1) Diversity, 2) Guidelines for Good Faith, 3) Recommendations for a Framework of Interpretation for Human Rights, 4) Jurisdiction of Settlement of Dispute Issues, 5) Recommendations for Improving the ICANN Office of the Ombudsman, 6) Recommendations to increase SO/ AC Accountability, 7) Recommendations to increase Staff Accountability and 8) Recommendations to improve ICANN Transparency.&lt;/p&gt;
&lt;p style="text-align: justify;"&gt;This blog will take a look at the essential human rights related considerations of the report and how the digital rights community can get involved with the effectuation of the recommendations.&lt;/p&gt;
&lt;p style="text-align: justify;"&gt;&lt;strong&gt;Diversity&lt;/strong&gt;&lt;/p&gt;
&lt;p style="text-align: justify;"&gt;The core issues concerning the issue of diversity revolve around the need for a uniform definition of the parameters of diversity and a community discussion on the ones already identified; geographic representation, language, gender, age, physical disability, diverse skills and stakeholder constituency. An agreed upon definition of all of these is necessary before its Board approval and application consistently through the various parts of ICANN. In addition, it is also required to formulate a standard template for diversity data collection and report generation. This sub group’s recommendations are estimated to be implemented in 6-18 months. Many of the recommendations need to be analyzed for compliance with the General Data Protection Regulation (GDPR) such as collecting of information relating to disability. For now, the GDPR is only referenced with no further details on how steps considered will either comply or contrast the law.&lt;/p&gt;
&lt;p style="text-align: justify;"&gt;&lt;strong&gt;Good faith Guidelines&lt;/strong&gt;&lt;/p&gt;
&lt;p style="text-align: justify;"&gt;The Empowered Community (EC) which includes all the Supporting Organizations, At-Large-Advisory-Committee and Government Advisory Council, are called upon to conceptualize guidelines to be followed when individuals from the EC are participating in Board Removal Processes. Subsequent to this, the implementation will take 6-12 months.&lt;/p&gt;
&lt;p style="text-align: justify;"&gt;&lt;strong&gt;Framework of Interpretation for Human Rights&lt;/strong&gt;&lt;/p&gt;
&lt;p style="text-align: justify;"&gt;Central to the human rights conversation and finally approved, is the Human Rights Framework of Interpretation. However the report does not give a specific timeline for its implementation, only mentioning that this process will take more than 12 months. The task within this is to establish practices of how the core value of respecting human rights will be balanced with other core values while developing ICANN policies and execution of its operations. All policy development processes, reviews, Cross Community Working Group recommendations will need a framework to consider and incorporate human rights, in tandem with the Framework of Interpretation. It will also have to be shown that policies and recommendations sent to the Board have factored in the FOI.&lt;/p&gt;
&lt;p style="text-align: justify;"&gt;&lt;strong&gt;Transparency&lt;/strong&gt;&lt;/p&gt;
&lt;p style="text-align: justify;"&gt;The recommendations focus on the following four key areas as listed below:&lt;br /&gt;1. Improving ICANN’s Documentary Information Disclosure Policy (DIDP).&lt;br /&gt;2. Documenting and Reporting on ICANN’s Interactions with Governments.&lt;br /&gt;3. Improving Transparency of Board Deliberations.&lt;br /&gt;4. Improving ICANN’s Anonymous Hotline (Whistleblower Protection).&lt;/p&gt;
&lt;p style="text-align: justify;"&gt;The bulk of the burden for implementation is put on ICANN org with the community providing oversight and ensuring ICANN lives up to its commitments under various policies and laws. Subsequent to this, the implementation will take 6-12 months.&lt;/p&gt;
&lt;p style="text-align: justify;"&gt;&lt;strong&gt;How the ICANN community can contribute to this work&lt;/strong&gt;&lt;/p&gt;
&lt;p style="text-align: justify;"&gt;This is a defining moment on the future of ICANN and there are great opportunities for the ICANN multistakeholder community to continue shaping the future of the Internet. Some of the envisioned actions by the community include:&lt;/p&gt;
&lt;ul style="text-align: justify;"&gt;
&lt;li&gt;monitoring and assessing the performance of the various ICANN bodies, and acting on the recommendations that emerge from those accountability processes. This will only be done through collaborative formulation of processes and procedures for PDPs, CCWGs, etc. to incorporate human rights considerations, and subsequently through implementation of the best practices suggested for improving SO/AC accountability and transparency;&lt;/li&gt;
&lt;li&gt;conducting diversity assessments to inform objectives and strategies for diversity criteria;&lt;/li&gt;
&lt;li&gt;supporting contracted parties through legal advice for change in their agreements when it comes to choice of law and venue recommendations;&lt;/li&gt;
&lt;li style="text-align: justify;"&gt;contributing to conversations where the Ombudsman can expand his/her involvement that go beyond current jurisdiction and authority&lt;/li&gt;&lt;/ul&gt;

        &lt;p&gt;
        For more details visit &lt;a href='https://cis-india.org/internet-governance/blog/article-19-akriti-bopanna-and-ephraim-percy-kenyanito-december-16-2019-icann-takes-one-step-forward-in-its-human-rights-and-accountability-commitments'&gt;https://cis-india.org/internet-governance/blog/article-19-akriti-bopanna-and-ephraim-percy-kenyanito-december-16-2019-icann-takes-one-step-forward-in-its-human-rights-and-accountability-commitments&lt;/a&gt;
        &lt;/p&gt;
    </description>
    <dc:publisher>No publisher</dc:publisher>
    <dc:creator>Akriti Bopanna and Ephraim Percy Kenyanito</dc:creator>
    <dc:rights></dc:rights>

    
        <dc:subject>Freedom of Speech and Expression</dc:subject>
    
    
        <dc:subject>ICANN</dc:subject>
    
    
        <dc:subject>IANA</dc:subject>
    
    
        <dc:subject>Internet Governance</dc:subject>
    

   <dc:date>2019-12-19T11:35:16Z</dc:date>
   <dc:type>Blog Entry</dc:type>
   </item>


    <item rdf:about="https://cis-india.org/internet-governance/call-for-comments-model-security-standards-for-the-indian-fintech-industry">
    <title>Call for Comments: Model Security Standards for the Indian Fintech Industry</title>
    <link>https://cis-india.org/internet-governance/call-for-comments-model-security-standards-for-the-indian-fintech-industry</link>
    <description>
        
&lt;p&gt;The Centre for Internet and Society is pleased to make available the draft document of the Model Security Standards for the Indian Fintech Industry for feedback and comments from all stakeholders. The objective of this document, first published in November 2019, is to ensure that user data is handled securely and safely by the fintech industry, and that smaller fintech businesses have a specific standard to follow in order to limit their liability for any future breaches. &lt;br /&gt;&lt;br /&gt;We invite anyone interested in the field of technology policy, including but not limited to lawyers, policy researchers, and engineers, to send in their feedback and comments on the draft document by 16 January 2020. We intend to publish the final draft by the end of January 2020. We look forward to receiving your contributions to make this document more comprehensive and effective. Please find a copy of the draft document &lt;a href="https://cis-india.org/internet-governance/resources/security-standards-for-the-financial-technology-sector-in-india" class="internal-link" title="Security Standards for the Financial Technology Sector in India"&gt;here&lt;/a&gt;.&lt;/p&gt;

        &lt;p&gt;
        For more details visit &lt;a href='https://cis-india.org/internet-governance/call-for-comments-model-security-standards-for-the-indian-fintech-industry'&gt;https://cis-india.org/internet-governance/call-for-comments-model-security-standards-for-the-indian-fintech-industry&lt;/a&gt;
        &lt;/p&gt;
    </description>
    <dc:publisher>No publisher</dc:publisher>
    <dc:creator>pranav</dc:creator>
    <dc:rights></dc:rights>

    
        <dc:subject>Financial Technology</dc:subject>
    
    
        <dc:subject>Cybersecurity</dc:subject>
    
    
        <dc:subject>internet governance</dc:subject>
    
    
        <dc:subject>Internet Governance</dc:subject>
    
    
        <dc:subject>Cyber Security</dc:subject>
    

   <dc:date>2019-12-16T13:16:25Z</dc:date>
   <dc:type>Blog Entry</dc:type>
   </item>


    <item rdf:about="https://cis-india.org/internet-governance/news/ietf106">
    <title>IETF106</title>
    <link>https://cis-india.org/internet-governance/news/ietf106</link>
    <description>
        &lt;b&gt;Gurshabad Grover participated at IETF106, which was held in Singapore 16-22 November, 2019.&lt;/b&gt;
        &lt;p class="moz-quote-pre"&gt;In the meeting of the Human Rights Protocol Considerations (hrpc) research group, I presented an update to draft-irtf-hrpc-guidelines-03 (Guidelines for Human Rights Protocol and Architecture Considerations), which is an Internet Draft adopted by the hrpc rg that he is co-editing with Niels ten Oever. &lt;a class="external-link" href="https://datatracker.ietf.org/doc/draft-irtf-hrpc-guidelines/"&gt;More info here&lt;/a&gt;.&lt;/p&gt;
&lt;p class="moz-quote-pre" style="text-align: justify; "&gt;Among other working/research group meetings, I participated theTransport Layer Security (tls) and the Privacy Enhancements and Assessments research group (pearg) sessions. I also participated inseveral side meetings, including the Public Interest Technology Group(pitg) meeting.&lt;/p&gt;
&lt;p class="moz-quote-pre" style="text-align: justify; "&gt;Agenda for the IETF and the different WGs/RG can be found on the &lt;a class="external-link" href="https://datatracker.ietf.org/meeting/106/agenda"&gt;IETF website&lt;/a&gt;.&lt;/p&gt;
        &lt;p&gt;
        For more details visit &lt;a href='https://cis-india.org/internet-governance/news/ietf106'&gt;https://cis-india.org/internet-governance/news/ietf106&lt;/a&gt;
        &lt;/p&gt;
    </description>
    <dc:publisher>No publisher</dc:publisher>
    <dc:creator>Admin</dc:creator>
    <dc:rights></dc:rights>

    
        <dc:subject>Internet Governance</dc:subject>
    
    
        <dc:subject>Privacy</dc:subject>
    

   <dc:date>2019-12-15T06:14:02Z</dc:date>
   <dc:type>News Item</dc:type>
   </item>




</rdf:RDF>
