<?xml version="1.0" encoding="utf-8" ?>
<rdf:RDF xmlns:rdf="http://www.w3.org/1999/02/22-rdf-syntax-ns#" xmlns:dc="http://purl.org/dc/elements/1.1/" xmlns:syn="http://purl.org/rss/1.0/modules/syndication/" xmlns="http://purl.org/rss/1.0/">




    



<channel rdf:about="https://cis-india.org/search_rss">
  <title>Centre for Internet and Society</title>
  <link>https://cis-india.org</link>
  
  <description>
    
            These are the search results for the query, showing results 21 to 35.
        
  </description>
  
  
  
  
  <image rdf:resource="https://cis-india.org/logo.png"/>

  <items>
    <rdf:Seq>
        
            <rdf:li rdf:resource="https://cis-india.org/internet-governance/blog/counter-comments-on-trais-consultation-paper-on-privacy-security-and-ownership-of-data-in-telecom-sector"/>
        
        
            <rdf:li rdf:resource="https://cis-india.org/internet-governance/blog/asian-age-amber-sinha-december-3-2017-"/>
        
        
            <rdf:li rdf:resource="https://cis-india.org/internet-governance/blog/aadhaar-bill-fails-to-incorporate-suggestions-by-the-standing-committee"/>
        
        
            <rdf:li rdf:resource="https://cis-india.org/internet-governance/blog/economic-times-july-23-2017-amber-sinha-aadhar-privacy-is-not-a-unidimensional-concept"/>
        
        
            <rdf:li rdf:resource="https://cis-india.org/internet-governance/blog/cis-statement-on-right-to-privacy-judgment"/>
        
        
            <rdf:li rdf:resource="https://cis-india.org/internet-governance/blog/governing-id-kenya2019s-huduma-namba-programme"/>
        
        
            <rdf:li rdf:resource="https://cis-india.org/internet-governance/blog/the-wire-amber-sinha-"/>
        
        
            <rdf:li rdf:resource="https://cis-india.org/internet-governance/blog/it-for-change-amber-sinha-regulating-sexist-online-harassment"/>
        
        
            <rdf:li rdf:resource="https://cis-india.org/internet-governance/blog/digital-policy-portal-july-13-2016-new-approaches-to-information-privacy-revisiting-the-purpose-limitation-principle"/>
        
        
            <rdf:li rdf:resource="https://cis-india.org/internet-governance/blog/the-fundamental-right-to-privacy-an-analysis"/>
        
        
            <rdf:li rdf:resource="https://cis-india.org/internet-governance/blog/regulating-sexist-online-harassment-a-model-of-online-harassment-as-a-form-of-censorship"/>
        
        
            <rdf:li rdf:resource="https://cis-india.org/internet-governance/blog/rethinking-national-privacy-principles"/>
        
        
            <rdf:li rdf:resource="https://cis-india.org/internet-governance/files/data-for-the-benefit-of-people"/>
        
        
            <rdf:li rdf:resource="https://cis-india.org/internet-governance/technical-appendix-to-use-of-sentiment-analysis-by-law-enforcement-an-analysis-of-scrutability-for-juridical-purposes"/>
        
        
            <rdf:li rdf:resource="https://cis-india.org/internet-governance/blog/unpacking-data-protection-law-a-visual-representation"/>
        
    </rdf:Seq>
  </items>

</channel>


    <item rdf:about="https://cis-india.org/internet-governance/blog/counter-comments-on-trais-consultation-paper-on-privacy-security-and-ownership-of-data-in-telecom-sector">
    <title>Counter Comments on TRAI's Consultation Paper on Privacy, Security and Ownership of Data in Telecom Sector</title>
    <link>https://cis-india.org/internet-governance/blog/counter-comments-on-trais-consultation-paper-on-privacy-security-and-ownership-of-data-in-telecom-sector</link>
    <description>
        &lt;b&gt;The Centre for Internet &amp; Society (CIS) has commented on the Consultation Paper on Privacy, Security and Ownership of Data in Telecom Sector published by the Telecom Regulatory Authority of India on August 9, 2017.&lt;/b&gt;
        &lt;p style="text-align: justify; "&gt;The submission is divided in three main parts. The first part 'Preliminary' introduces the document. The second part 'About CIS' is an overview of the organization. The third part contains the 'Counter Comments' on the Consultation Paper taking into account the submission made by other stakeholders.&lt;/p&gt;
&lt;p style="text-align: justify; "&gt;Download the &lt;a class="external-link" href="http://cis-india.org/internet-governance/files/counter-comments.pdf"&gt;full submission here&lt;/a&gt;&lt;/p&gt;
        &lt;p&gt;
        For more details visit &lt;a href='https://cis-india.org/internet-governance/blog/counter-comments-on-trais-consultation-paper-on-privacy-security-and-ownership-of-data-in-telecom-sector'&gt;https://cis-india.org/internet-governance/blog/counter-comments-on-trais-consultation-paper-on-privacy-security-and-ownership-of-data-in-telecom-sector&lt;/a&gt;
        &lt;/p&gt;
    </description>
    <dc:publisher>No publisher</dc:publisher>
    <dc:creator>amber</dc:creator>
    <dc:rights></dc:rights>

    
        <dc:subject>Internet Governance</dc:subject>
    
    
        <dc:subject>Privacy</dc:subject>
    

   <dc:date>2017-11-23T14:29:06Z</dc:date>
   <dc:type>Blog Entry</dc:type>
   </item>


    <item rdf:about="https://cis-india.org/internet-governance/blog/asian-age-amber-sinha-december-3-2017-">
    <title>Breeding misinformation in virtual space</title>
    <link>https://cis-india.org/internet-governance/blog/asian-age-amber-sinha-december-3-2017-</link>
    <description>
        &lt;b&gt;A well-informed citizenry and institutions that provide good information are fundamental to a functional democracy.&lt;/b&gt;
&lt;p style="text-align: justify; "&gt;The phenomenon of fake news has received significant scholarly and  media attention over the last few years. In March, Sir Tim Berners Lee,  inventor of the World Wide Web, has called for a crackdown on fake news,  stating in an open letter that “misinformation, or fake news, which is  surprising, shocking, or designed to appeal to our biases, can spread  like wildfire.”&lt;/p&gt;
&lt;p style="text-align: justify; "&gt;Gartner, which annually predicts what the next year in technology  will look like, highlighted ‘increased fake news’  as one of its  predictions.&lt;/p&gt;
&lt;p style="text-align: justify; "&gt;The report states that by 2022, “majority of individuals in mature  economies will consume more false information than true information. Due  to its wide popularity and reach, social media has come to play a  central role in the fake news debate.”&lt;/p&gt;
&lt;p style="text-align: justify; "&gt;Researchers have suggested that rumours penetrate deeper within a  social network than outside, indicating the susceptibility of this  medium. Social networks such as Facebook and communities on messaging  services such as WhatsApp groups provide the perfect environment for  spreading rumours. Information received via friends tends to be trusted,  and online networks allow individuals to transmit information to many  friends at once.&lt;/p&gt;
&lt;p style="text-align: justify; "&gt;In order to understand the recent phenomenon of fake news, it is  important to recognise that the problem of misinformation and propaganda  has existed for a long time. The historical examples of fake news go  back centuries where, prior to his coronation as Roman Emperor, Octavian  ran a disinformation campaign against Marcus Antonius to turn the Roman  populace against him.&lt;/p&gt;
&lt;p class="imgCenter" style="text-align: justify; "&gt;&lt;a class="objectNew"&gt;&lt;img alt="aa" src="http://images.asianage.com/images/fdeb4b878fd86fc0af509a2eb0b6927a4c6fdede-tc-img-preview.jpg" title="aa" /&gt;&lt;/a&gt;&lt;/p&gt;
&lt;p style="text-align: justify; "&gt;The advent of the printing press in the 15th century led to  widespread publication; however, there were no standards of verification  and journalistic ethics. Andrew Pettigrew writes in his The Invention  of News, that news reporting in the 16th and 17th centuries was full of  portents about “comets, celestial apparitions, freaks of nature and  natural disasters.”&lt;/p&gt;
&lt;p style="text-align: justify; "&gt;In India, the immediate cause for the 1857 War of Independence was  rumours that the bones of cows and pigs were mixed with flour and used  to grease the cartridges used by the sepoys.&lt;/p&gt;
&lt;p style="text-align: justify; "&gt;Leading up to the Second World War, the radio emerged as a strong  medium for dissemination of disinformation, used by the Nazis and other  Axis powers. More recently, the milk miracle in the mid-1990s consisting  of stories of the idol of Ganesha drinking milk was a popular fake news  phenomenon. In 2008, rumours about the popular snack, Kurkure, being  made out of plastic became so widespread that Pepsi, its holding  company, had to publicly rebut them.&lt;/p&gt;
&lt;p style="text-align: justify; "&gt;A quick survey by us at the Centre for Internet and Society, for a  forthcoming report, of the different kinds of misinformation being  circulated in India, suggested four different kinds of fake news.&lt;/p&gt;
&lt;p style="text-align: justify; "&gt;The first is a case of manufactured primary content. This includes  instances where the entire premise on which an argument is based is  patently false. In August 2017, a leading TV channel reported that  electricity had been cut to the Jama Masjid in New Delhi for non-payment  of bills. This was based on a false report carried by a news portal.&lt;/p&gt;
&lt;p style="text-align: justify; "&gt;The second kind of fake news involves manipulation or editing of  primary content so as to misrepresent it as something else. This form of  fake news is often seen with respect to multimedia content such as  images, pictures, audios and videos. These two forms of fake news tend  to originate outside traditional media such as newspapers and television  channels, and can be often sourced back to social media and WhatsApp  forwards.&lt;/p&gt;
&lt;p style="text-align: justify; "&gt;However, we see such unverified stories being picked up by  traditional media. Further, there are instances where genuine content  such as text and pictures are shared with fallacious contexts and  descriptions. Earlier this year, several dailies pointed out that an  image shared by the ministry of home affairs, purportedly of the  floodlit India-Pakistan border, was actually an image of the  Spain-Morocco border. In this case, the image was not doctored but the  accompanying information was false.&lt;/p&gt;
&lt;p style="text-align: justify; "&gt;Third, more complicated cases of misinformation involve the primary  content itself not being false or manipulated, but the facts when they  are reported may be quoted out of context. Most examples of  misinformation spread by mainstream media, which has more evolved  systems of fact checking and verification, and editorial controls, would  tend to fall under this.&lt;/p&gt;
&lt;p style="text-align: justify; "&gt;Finally, there are instances of lack of diligence in fully  understanding the issues before reporting. Such misrepresentations are  often encountered while reporting in fields that require specialised  knowledge, such as science and technology, law, finance etc. Such forms  of misinformation, while not suggestive of malafide intent can still  prove to be quite dangerous in shaping erroneous opinions.&lt;/p&gt;
&lt;p style="text-align: justify; "&gt;While the widespread dissemination of fake news contributes greatly  to its effectiveness, it also has a lot to do with the manner in which  it is designed to pander to our cognitive biases. Directionally  motivated reasoning prompts people confronted with political information  to process it with an intention to reach a certain pre-decided  conclusion, and not with the intention to assess it in a dispassionate  manner. This further results in greater susceptibility to confirmation  bias, disconfirmation bias and prior attitude effect.&lt;/p&gt;
&lt;p style="text-align: justify; "&gt;Fake news is also linked to the idea of “naïve realism,” the belief  people have that their perception of reality is the only accurate view,  and those in disagreement are necessarily uninformed, irrational, or  biased. This also explains why so much fake news simply does not engage  with alternative points of view.&lt;/p&gt;
&lt;p style="text-align: justify; "&gt;A well-informed citizenry and institutions that provide good  information are fundamental to a functional democracy. The use of the  digital medium for fast, unhindered and unchecked spread of information  presents a fertile ground for those seeking to spread misinformation.  How we respond to this issue will be vital for democratic societies in  our immediate future. Fake news presents a complex regulatory challenge  that requires the participation of different stakeholders such as the  content disseminators, platforms, norm guardians which include  institutional fact checkers, trade organisations, and “name-and-shaming”  watchdogs, regulators and consumers.&lt;/p&gt;
        &lt;p&gt;
        For more details visit &lt;a href='https://cis-india.org/internet-governance/blog/asian-age-amber-sinha-december-3-2017-'&gt;https://cis-india.org/internet-governance/blog/asian-age-amber-sinha-december-3-2017-&lt;/a&gt;
        &lt;/p&gt;
    </description>
    <dc:publisher>No publisher</dc:publisher>
    <dc:creator>amber</dc:creator>
    <dc:rights></dc:rights>

    
        <dc:subject>Internet Governance</dc:subject>
    
    
        <dc:subject>Privacy</dc:subject>
    

   <dc:date>2017-12-08T02:24:29Z</dc:date>
   <dc:type>Blog Entry</dc:type>
   </item>


    <item rdf:about="https://cis-india.org/internet-governance/blog/aadhaar-bill-fails-to-incorporate-suggestions-by-the-standing-committee">
    <title>Aadhaar Bill fails to incorporate suggestions by the Standing Committee</title>
    <link>https://cis-india.org/internet-governance/blog/aadhaar-bill-fails-to-incorporate-suggestions-by-the-standing-committee</link>
    <description>
        &lt;b&gt;In 2011, a standing committee report led by Yashwant Sinha had been scathing in its indictments of the Aadhaar Bill introduced by the UPA government. Five years later, the NDA government has introduced a new bill which is a rehash of the same. I look at the concerns raised by the committee report, none of which have been addressed by the new bill.
&lt;/b&gt;
        
&lt;p id="docs-internal-guid-0c1d0148-5959-8221-80f0-984c1f109411" dir="ltr"&gt;The article was published by &lt;a class="external-link" href="http://thewire.in/2016/03/10/aadhaar-bill-fails-to-incorporate-standing-committees-suggestions-24433/"&gt;The Wire&lt;/a&gt;&lt;a class="external-link" href="https://globalvoices.org/2016/02/09/a-good-day-for-the-internet-everywhere-india-bans-differential-data-pricing/"&gt; &lt;/a&gt;on March 10, 2016&lt;/p&gt;
&lt;p dir="ltr"&gt;In December, 2010, the UPA Government introduced the National Identification Authority of India Bill, 2010 in the Parliament. It was subsequently referred to a Standing Committee on Finance by the Speaker of Lok Sabha under Rule 331E of the Rules of Procedure and Conduct of Business in Lok Sabha. This Committee, headed by BJP leader Yashwant Sinha took evidence from the Minister of Planning and the UIDAI from the government, as well as seeking the view of parties such as the National Human Rights Commission, Indian Banks Association and researchers like Dr Reetika Khera and Dr. Usha Ramanathan. In 2011, having heard from various parties and considering the concerns and apprehensions about the UID scheme, the Committee deemed the bill unacceptable and suggested a re-consideration of the UID scheme as well as the draft legislation.&lt;/p&gt;
&lt;p dir="ltr"&gt;The Aadhaar programme has so far been implemented under the Unique Identification Authority of India, a Central Government agency created through an executive order. This programme has been shrouded in controversy over issues of privacy and security resulting in a Public Interest Litigation filed by Judge Puttaswamy in the Supreme Court. While the BJP had criticised the project as well as the draft legislation &amp;nbsp;when it was in opposition, once it came to power and particularly, after it launched various welfare schemes like Digital India and Jan Dhan Yojna, it decided to continue with it and use Aadhaar as the identification technology for these projects. In the last year, there have been orders passed by the Supreme Court which prohibited making Aadhaar mandatory for availing services. One of the questions that the government has had to answer both inside and outside the court on the UID project is the lack of a legislative mandate for a project of this size. About five years later, the new BJP led government has come back with a rehash of the same old draft, and no comments made by the standing committee have been taken into account.&lt;/p&gt;
&lt;p dir="ltr"&gt;The Standing Committee on the old bill had taken great exception to the continued collection of data and issuance of Aadhaar numbers, while the Bill was pending in the Parliament. The report said that the implementation of the provisions of the Bill and continuing to incur expenditure from the exchequer was a circumvention of the prerogative powers of the Parliament. However, the project has continued without abeyance since its inception in 2009. I am listing below some of the issues that the Committee identified with the UID project and draft legislation, none of which have been addressed in current Bill.&lt;/p&gt;
&lt;p dir="ltr"&gt;One of the primary arguments made by proponents of Aadhaar has been that it would be useful in providing services to marginalized sections of the society who currently do not have identification cards and consequently, are not able to receive state sponsored services, benefits and subsidies. The report points that the project would not be able to achieve this as no statistical data on the marginalized sections of the society are being used by UIDAI to provide coverage to them. The introducer system, which was supposed to provide Aadhaar numbers to those without any form of identification, has been used to enroll only 0.03% of the total number of people registered. Further, the &lt;a href="http://uidai.gov.in/UID_PDF/Committees/Biometrics_Standards_Committee_report.pdf"&gt;Biometrics Standards Committee of UIDAI&lt;/a&gt; has itself acknowledged the issues caused due to a high number of manual laborers in India which would lead to sub-optimal fingerprint scans. A &lt;a href="http://www.4gid.com/De-dup-complexity%20unique%20ID%20context.pdf"&gt;report by 4G Identity Solutions&lt;/a&gt; estimates that while in any population, approximately 5% of the people have unreadable fingerprints, in India it could lead to a failure to enroll up to 15% of the population. In this manner, the project could actually end up excluding more people.&lt;/p&gt;
&lt;p dir="ltr"&gt;The Report also pointed to a lack of cost-benefit analysis done before going ahead with scheme of this scale. It makes a reference to the &lt;a href="http://eprints.lse.ac.uk/684/1/identityreport.pdf"&gt;report&lt;/a&gt; by the London School of Economics on the UK Identity Project which was shelved due to a) huge costs involved in the project, b) the complexity of the exercise and unavailability of reliable, safe and tested technology, c) risks to security and safety of registrants, d) security measures at a scale that will result in substantially higher implementation and operational costs and e) extreme dangers to rights of registrants and public interest. The Committee Report insisted that such global experiences remained relevant to the UID project and need to be considered. However, the new Bill has not been drafted with a view to address any of these issues.&lt;/p&gt;
&lt;p dir="ltr"&gt;The Committee comes down heavily on the irregularities in data collection by the UIDAI. They raise doubts about the ability of the Registrars to effectively verify the registrants and a lack of any security audit mechanisms that could identify issues in enrollment. Pointing to the news reports about irregularities in the process being followed by the Registrars appointed by the UIDAI, the Committee deems the MoUs signed between the UIDAI and the Registrars as toothless. The involvement of private parties has been under question already with many questions being raised over the lack of appropriate safeguards in the contracts with the private contractors.&lt;/p&gt;
&lt;span id="docs-internal-guid-0c1d0148-595b-32fa-49d2-8f6a347a4c00"&gt;Perhaps the most significant observation of the Committee was that any scheme that facilitates creation of such a massive database of personal information of the people of the country and its linkage with other databases should be preceded by a comprehensive data protection law. By stating this, the Committee has acknowledged that in the absence of a privacy law which governs the collection, use and storage of the personal data, the UID project will lead to abuse, surveillance and profiling of individuals. It makes a reference to the Privacy Bill which is still at only the draft stage. The current data protection framework in the Section 43A rules under the Information Technology Act, 2000 are woefully inadequate and far too limited in their scope. While there are some protection built into Chapter VI of the new bill, these are nowhere as comprehensive as the ones articulated in the Privacy Bill. Additionally, these protections are subject to broad exceptions which could significantly dilute their impact.&lt;/span&gt;
&lt;p&gt;&amp;nbsp;&lt;/p&gt;

        &lt;p&gt;
        For more details visit &lt;a href='https://cis-india.org/internet-governance/blog/aadhaar-bill-fails-to-incorporate-suggestions-by-the-standing-committee'&gt;https://cis-india.org/internet-governance/blog/aadhaar-bill-fails-to-incorporate-suggestions-by-the-standing-committee&lt;/a&gt;
        &lt;/p&gt;
    </description>
    <dc:publisher>No publisher</dc:publisher>
    <dc:creator>amber</dc:creator>
    <dc:rights></dc:rights>

    
        <dc:subject>UID</dc:subject>
    
    
        <dc:subject>Aadhaar</dc:subject>
    
    
        <dc:subject>Internet Governance</dc:subject>
    
    
        <dc:subject>Privacy</dc:subject>
    

   <dc:date>2016-03-10T15:58:57Z</dc:date>
   <dc:type>Blog Entry</dc:type>
   </item>


    <item rdf:about="https://cis-india.org/internet-governance/blog/economic-times-july-23-2017-amber-sinha-aadhar-privacy-is-not-a-unidimensional-concept">
    <title>Aadhar: Privacy is not a unidimensional concept</title>
    <link>https://cis-india.org/internet-governance/blog/economic-times-july-23-2017-amber-sinha-aadhar-privacy-is-not-a-unidimensional-concept</link>
    <description>
        &lt;b&gt;Right to privacy is important not only for our negotiations with the information age but also to counter the transgressions of a welfare state. A robust right to privacy is essential for all Indian citizens to defend their individual autonomy in the face of invasive state actions purportedly for the public good.&lt;/b&gt;
        &lt;p style="text-align: justify; "&gt;The article was published in the &lt;a class="external-link" href="http://economictimes.indiatimes.com/news/politics-and-nation/aadhar-privacy-is-not-a-unidimensional-concept/printarticle/59716562.cms"&gt;Economic Times&lt;/a&gt; on July 23, 2017.&lt;/p&gt;
&lt;hr /&gt;
&lt;p style="text-align: justify; "&gt;The ruling of this nine-judge bench will have far-reaching impact on the extent and scope of rights available to us all. In a disappointing case of judicial evasion by the apex court, it has taken over 600 days since a reference order was passed in August 11, 2015, for this bench to be constituted. Over two days of arguments, the counsels for the petitioners have presented before the court why the right to privacy, despite not finding a mention in the Constitution of India, is a fundamental right essential to a person’s dignity and liberty, and must be read into not one but multiple articles of the Constitution. The government will make its arguments in the coming week.&lt;/p&gt;
&lt;p style="text-align: justify; "&gt;One must wonder why we are debating the contours of the right to privacy, which 40 years of jurisprudence had lulled us into believing we already had. The answer to that can be found in a series of hearings in the Aadhaar case that began in 2012. Justice KS Puttaswamy, a former Karnataka High Court judge, filed a petition before the Supreme Court, questioning the validity of the Aadhaar project due its lack of legislative basis (since then the Aadhaar Act was passed in 2016) and its transgressions on our fundamental rights. Over time, a number of other petitions also made their way to the apex court, challenging different aspects of the Aadhaar project. Since then, five different interim orders by the Supreme Court have stated that no person should suffer because they do not have an Aadhaar number. Aadhaar, according to the court, could not be made mandatory to avail benefits and services from government schemes. Further, the court has limited the use of Aadhaar to specific schemes: LPG, PDS, MGNREGA, National Social Assistance Programme, the Pradhan Mantri Jan Dhan Yojna and EPFO.&lt;/p&gt;
&lt;p style="text-align: justify; "&gt;The real spanner in the works in the progress of this case was the stand taken by Mukul Rohatgi, then attorney general of India who, in a hearing before the court in July 2015, stated that there is no constitutionally guaranteed right to privacy. His reliance was on two Supreme Court judgments in MP Sharma v Satish Chandra (1954) and Kharak Singh v State of Uttar Pradesh (1962): both cases, decided by eight- and six-judge benches respectively, denied the existence of a constitutional right to privacy. As the subsequent judgments which upheld the right to privacy were by smaller benches, Rohatgi claimed that MP Sharma and Kharak Singh still prevailed over them, until they were overruled by a larger bench.&lt;/p&gt;
&lt;p style="text-align: justify; "&gt;The reference to a larger bench has since delayed the entire matter, even as a number of government schemes have made Aadhaar mandatory. This reading of privacy as a unidimensional concept by the courts is, with due respect, erroneous. Privacy, as a concept, includes within its scope, spatial, familial, informational and decisional aspects. We all have a legitimate expectation of privacy in our private spaces, such as our homes, and in our personal relationships. Similarly, we must be able to exercise some control over how personal data, like our financial information, are disseminated. Most importantly, privacy gives us the space to make autonomous choices and decisions without external interference. All these dimensions of privacy must stand as distinct rights. In MP Sharma, the court rejected a certain aspect of the right of privacy by refusing to acknowledge a right against search and seizure. This, in no way prevented the court, even in the form of a smaller bench, from ruling on any other aspects of privacy, including those that are relevant to the Aadhaar case.&lt;/p&gt;
&lt;p style="text-align: justify; "&gt;The limited referral to this bench means that the court will have to rule on the status of privacy and its possible limitations in isolation, without even going into the details of the Aadhaar case (based on the nature of protection that this bench accords to privacy, the petitioners and defendants in the Aadhaar case will have to argue afresh on whether the project does impede on this most fundamental right). There are no facts of the case to ground the legal principles in, and defining the contours of a right can be a difficult exercise. The court must be wary of how any limits they put on the right may be used in future. Equally, it is important to articulate that any limitations on the right to privacy due to competing interests such as national security and public interest must be imposed only when necessary and always be proportionate.&lt;/p&gt;
&lt;p style="text-align: justify; "&gt;It will not be enough for the court to merely state that we have a constitutional right to privacy. They would be well advised to cut through the muddle of existing privacy jurisprudence, and unequivocally establish the various facets of the right. Without that, we may not be able to withstand the modern dangers of surveillance, denial of bodily integrity and self-determination through forcible collection of information. The nine judges, in their collective wisdom, must not only ensure that we have a right to privacy, but also clearly articulate a robust reading of this right capable of withstanding the growing interferences with our autonomy.&lt;/p&gt;
        &lt;p&gt;
        For more details visit &lt;a href='https://cis-india.org/internet-governance/blog/economic-times-july-23-2017-amber-sinha-aadhar-privacy-is-not-a-unidimensional-concept'&gt;https://cis-india.org/internet-governance/blog/economic-times-july-23-2017-amber-sinha-aadhar-privacy-is-not-a-unidimensional-concept&lt;/a&gt;
        &lt;/p&gt;
    </description>
    <dc:publisher>No publisher</dc:publisher>
    <dc:creator>amber</dc:creator>
    <dc:rights></dc:rights>

    
        <dc:subject>Aadhaar</dc:subject>
    
    
        <dc:subject>Internet Governance</dc:subject>
    
    
        <dc:subject>Privacy</dc:subject>
    

   <dc:date>2017-08-23T01:50:19Z</dc:date>
   <dc:type>Blog Entry</dc:type>
   </item>


    <item rdf:about="https://cis-india.org/internet-governance/blog/cis-statement-on-right-to-privacy-judgment">
    <title>CIS Statement on Right to Privacy Judgment</title>
    <link>https://cis-india.org/internet-governance/blog/cis-statement-on-right-to-privacy-judgment</link>
    <description>
        &lt;b&gt;In an emphatic endorsement of the right to privacy, a nine judge constitutional bench unanimously upheld a fundamental right to privacy. The events leading to this bench began during the hearings in the ongoing Aadhaar case, when in August 2015, Mukul Rohatgi, the then Attorney General stated that there is no constitutionally guaranteed right to privacy.&lt;/b&gt;
        
&lt;p style="text-align: justify;"&gt;His reliance was on two Supreme Court judgments in MP Sharma v Satish Chandra (1954) and Kharak Singh v State of Uttar Pradesh (1962): both cases, decided by eight- and six-judge benches respectively, denied the existence of a constitutional right to privacy. As the subsequent judgments which upheld the right to privacy were by smaller benches, he claimed that MP Sharma and Kharak Singh still prevailed over them, until they were overruled by a larger bench. This landmark judgment was in response to a referral order to clear the confusion over the status of privacy as a right.&lt;/p&gt;
&lt;p style="text-align: justify;"&gt;We, at the Centre for Internet and Society (CIS) welcome this judgement and applaud the depth and scope of the Supreme Court’s reasoning. CIS has been producing research on the different aspects of the right to privacy and its implications for the last seven years and had the privilege of serving on the Justice AP Shah Committee and contributing to the Report of the Group of Experts on Privacy.&lt;a name="fr1" href="#fn1"&gt;[1]&lt;/a&gt; We are honoured that some of our research has also been cited by the judgment.&lt;a name="fr2" href="#fn2"&gt;[2] &lt;/a&gt;Such judicial recognition is evidence of the impact sound research can have on policymaking.&lt;/p&gt;
&lt;p style="text-align: justify;" class="normal"&gt;In the course of a 547 page judgment, the bench affirmed the fundamental nature of the right to privacy reading it into the values of dignity and liberty. The judgment is instructive in its reference to scholarly works and jurisprudence not only in India but other legal systems such as USA, South Africa, EU and UK, while recognising a broad right to privacy with various dimensions across spatial, informational and decisional spheres. We note with special appreciation that women’s bodily integrity and citizens’ sexual orientation are among those aspects of privacy that were clearly recognised in the judgment. For researchers studying privacy and its importance, this judgment is of great value as it provides clear reasoning to reject oft-quoted arguments which are used to deny privacy’s significance. The judgement is also cognizant of the implications of the digital age and emphasises the need for a robust data protection framework.&lt;/p&gt;
&lt;p style="text-align: justify;" class="normal"&gt;The right to privacy has been read into into Article 21 (Right to life and liberty), and Part III (Chapter on Fundamental Rights) of the Constitution. This means that any limitation on the right in the form of reasonable restrictions must not only satisfy the tests evolved under Article 21, but where loss of privacy leads to infringement on other rights, such as chilling effects of surveillance on free speech, the tests for constitutionality under those provisions for also be satisfied by the limiting action. This provides a broad protection to citizens’ privacy which may not be easily restricted. We expect that this judgment will have far reaching impacts, not just with respect to the immediate Aadhaar case, but also to in a score of other matters such as protection of sexual choice by decriminalising Section 377 of the Indian Penal Code, oversight of statutory search and seizure provisions such as Section 132 of the Income Tax Act, personal data collection and processing practices by both state and private actors and mass surveillance programmes in the interest of national security.&lt;/p&gt;
&lt;p style="text-align: justify;" class="normal"&gt;As this judgment comes in response to a referral order, the judges were not dealing with any questions of fact to ground the legal principles in. Subsequent judgments which deal with privacy will apply these principles and further evolve the contours of this right on a case-by-case basis. For now, we welcome this judgment and look forward to its consistent application in the future.&lt;/p&gt;
&lt;hr /&gt;
&lt;p&gt;[&lt;a name="fn1" href="#fr1"&gt;1&lt;/a&gt;]. http://planningcommission.nic.in/reports/genrep/rep_privacy.pdf&lt;/p&gt;
&lt;p style="text-align: justify;" class="normal"&gt;[&lt;a name="fn2" href="#fr2"&gt;2&lt;/a&gt;]. CIS was quoted in the judgement on footnote 46, page 33 and 34: &lt;a href="http://supremecourtofindia.nic.in/pdf/LU/ALL%20WP(C)%20No.494%20of%202012%20Right%20to%20Privacy.pdf"&gt;http://supremecourtofindia.nic.in/pdf/LU/ALL%20WP(C)%20No.494%20of%202012%20Right%20to%20Privacy.pdf &lt;/a&gt;The quote is " Illustratively, the Centre for Internet and Society has two interesting articles tracing the origin of privacy within Classical Hindu Law and Islamic Law. See Ashna Ashesh and Bhairav Acharya ,“Locating Constructs of Privacy within Classical Hindu Law”, The Centre for Internet and Society, available at &lt;a href="https://cis-india.org/internet-"&gt;https://cis-india.org/internet-&lt;/a&gt;governance/blog/loading-constructs-of-privacy-within-classical-hindu-law. See also Vidushi Marda and Bhairav Acharya, “Identifying Aspects of Privacy in Islamic Law”, The Centre for Internet and Society, available at &lt;a href="https://cis-india.org/internet-governance/blog/identifying-aspects-of-privacy-in-islamic-law"&gt;https://cis-india.org/internet-governance/blog/identifying-aspects-of-privacy-in-islamic-law&lt;/a&gt; " Further, research commissioned by CIS cited in the judgment includes a reference in page 201 footnote 319, "Bhairav Acharya, “The Four Parts of Privacy in India”, Economic &amp;amp; Political Weekly (2015), Vol. 50 Issue 22, at page 32."&amp;nbsp;&lt;/p&gt;

        &lt;p&gt;
        For more details visit &lt;a href='https://cis-india.org/internet-governance/blog/cis-statement-on-right-to-privacy-judgment'&gt;https://cis-india.org/internet-governance/blog/cis-statement-on-right-to-privacy-judgment&lt;/a&gt;
        &lt;/p&gt;
    </description>
    <dc:publisher>No publisher</dc:publisher>
    <dc:creator>amber</dc:creator>
    <dc:rights></dc:rights>

    
        <dc:subject>Featured</dc:subject>
    
    
        <dc:subject>Internet Governance</dc:subject>
    
    
        <dc:subject>Privacy</dc:subject>
    

   <dc:date>2017-08-31T18:13:14Z</dc:date>
   <dc:type>Blog Entry</dc:type>
   </item>


    <item rdf:about="https://cis-india.org/internet-governance/blog/governing-id-kenya2019s-huduma-namba-programme">
    <title>Governing ID: Kenya’s Huduma Namba Programme</title>
    <link>https://cis-india.org/internet-governance/blog/governing-id-kenya2019s-huduma-namba-programme</link>
    <description>
        &lt;b&gt;&lt;/b&gt;
        
&lt;p&gt;In our fourth case-study, we use our Evaluation Framework for Digital ID to examine the use of Digital ID in Kenya.&lt;/p&gt;
&lt;p&gt;Read the &lt;a class="external-link" href="https://digitalid.design/evaluation-framework-case-studies/kenya.html"&gt;case-study&lt;/a&gt; or download as &lt;a href="https://cis-india.org/internet-governance/digital-id-kenya-case-study" class="internal-link" title="Digital ID Kenya Case Study"&gt;PDF&lt;/a&gt;.&lt;/p&gt;

        &lt;p&gt;
        For more details visit &lt;a href='https://cis-india.org/internet-governance/blog/governing-id-kenya2019s-huduma-namba-programme'&gt;https://cis-india.org/internet-governance/blog/governing-id-kenya2019s-huduma-namba-programme&lt;/a&gt;
        &lt;/p&gt;
    </description>
    <dc:publisher>No publisher</dc:publisher>
    <dc:creator>amber</dc:creator>
    <dc:rights></dc:rights>

    
        <dc:subject>internet governance</dc:subject>
    
    
        <dc:subject>Internet Governance</dc:subject>
    
    
        <dc:subject>Digital ID</dc:subject>
    
    
        <dc:subject>Digital Identity</dc:subject>
    

   <dc:date>2020-03-02T13:19:15Z</dc:date>
   <dc:type>Blog Entry</dc:type>
   </item>


    <item rdf:about="https://cis-india.org/internet-governance/blog/the-wire-amber-sinha-">
    <title>New Recommendations to Regulate Online Hate Speech Could Pose More Problems Than Solutions</title>
    <link>https://cis-india.org/internet-governance/blog/the-wire-amber-sinha-</link>
    <description>
        &lt;b&gt;The T.K. Viswanathan committee’s recommendations could prove to be dangerous for free speech if acted upon without resolving its flaws.&lt;/b&gt;
        &lt;p style="text-align: justify; "&gt;The article was published by &lt;a class="external-link" href="https://thewire.in/187381/new-recommendations-regulate-online-hate-speech-problems/"&gt;Wire&lt;/a&gt; on October 14, 2017&lt;/p&gt;
&lt;hr /&gt;
&lt;p style="text-align: justify; "&gt;&lt;a title="It was reported last week" href="https://thewire.in/184920/post-section-66a-central-panel-tells-government-to-amend-ipc-crpc-it-act-to-punish-online-hate-speech/" rel="noopener
        noreferrer" target="_blank"&gt;&lt;span&gt;It was reported last week&lt;/span&gt;&lt;/a&gt; that an expert       committee headed by T.K. Viswanathan, former secretary general of       Lok Sabha, recommended that the Indian Penal Code (IPC), the Code       of Criminal Procedure and the Information Technology Act be       amended to include stringent penal provisions regarding online       hate speech. While this report has not been made public, &lt;a title="the Indian
        Express reported" href="http://indianexpress.com/article/india/hate-speech-online-punishment-supreme-court-section-66a-information-technology-act-narendra-modi-4876648/" rel="external nofollow" target="_blank"&gt;&lt;span&gt;the&lt;em&gt; Indian Express&lt;/em&gt; reported&lt;/span&gt;&lt;/a&gt; that       the committee’s recommendations include, among other things,       insertion and expansion of penal provisions in the IPC on       ‘incitement to hatred’ (Section 153C) and ‘causing fear, alarm or       provocation of violence’ (Section 505A) to include online speech,       and creation of the offices of state cyber crime coordinator and       district cyber crime cell.&lt;/p&gt;
&lt;p style="text-align: justify; "&gt;Online hate speech has been among the more complex issues with       regard to the regulation of technology. The complexity of       restricting hate speech has to do with a number of factors,       including the ubiquity of strong opinions in online speech, often       offensive to certain groups, the interplay between individual and       group rights, and the tensions between the values of dignity,       liberty and equality. Siddharth Narrain has &lt;a title="pointed out" href="http://jmi.ac.in/upload/menuupload/16_ccmg_epwsedition.pdf" rel="external nofollow" target="_blank"&gt;&lt;span&gt;pointed         out&lt;/span&gt;&lt;/a&gt; in his thesis on hate speech law that the use of law to       curb offensive or hurtful speech has been done by religious       groups, caste based groups, occupation based groups with strong       caste associations, language groups and gender based groups. The       range of actions arising from such uses of the law include the       banning of books, criminal proceedings for political satire, or       even ‘liking’ political posts on social media.&lt;/p&gt;
&lt;p style="text-align: justify; "&gt;The relationship between speech acts and acts of violence is a       complicated issue with little consensus on appropriate ways to       regulate it. Scholars such as Jonathan Maynard have advocated       greater reliance on non-legal responses such as counter speech, as       the use of criminal law to tackle speech often has the effect of       chilling forms of dissent. The f&lt;span&gt;&lt;span&gt;ormulation and application of legal           tests in criminal law with respect to hate speech is also hard           as hate speech has much to do with the content of speech as it           has to do with the context, including factors such as power           structures.&lt;/span&gt; &lt;span&gt;Speech by a           figure in a position of power also has a greater likelihood to           result in a call for violence. &lt;/span&gt;&lt;/span&gt;&lt;/p&gt;
&lt;p style="text-align: justify; "&gt;Before looking at the specific recommendations made by the T.K.       Viswanathan committee, it would be worthwhile to also look at the       background of this committee. The committee notes with approval       the &lt;a title="Law Commission of
        India’s 267th report on the issue of hate speech" href="http://lawcommissionofindia.nic.in/reports/Report267.pdf" rel="external nofollow" target="_blank"&gt;&lt;span&gt;Law Commission         of India’s 267th report on the issue of hate speech&lt;/span&gt;&lt;/a&gt;. The Law       Commission, in turn, was acting at the behest of observations made       by the Supreme Court in &lt;a title="Pravasi Bhalai
        Sangathan v. Union of India" href="https://indiankanoon.org/docfragment/61854231/?formInput=ramesh%20union%20india%20" rel="external nofollow" target="_blank"&gt;&lt;span&gt;&lt;i&gt;Pravasi Bhalai Sangathan&lt;/i&gt; v.         &lt;i&gt;Union of India&lt;/i&gt;&lt;/span&gt;&lt;/a&gt; in 2014. In this case, the Supreme       Court exhibited judicial restraint and refused to frame guidelines       prohibiting political hate speech, and had instead requested the       Law Commission to look into it. However, the court noted with       approval international case law on the issues, particularly the       observations in the Canadian case &lt;a title="Saskatchewan v. Whatcott" href="https://scc-csc.lexum.com/scc-csc/scc-csc/en/item/12876/index.do" rel="external nofollow" target="_blank"&gt;&lt;span&gt;&lt;i&gt;Saskatchewan&lt;/i&gt; v. &lt;i&gt;Whatcott&lt;/i&gt;&lt;/span&gt;&lt;/a&gt;.       Relying on &lt;i&gt;Whatcott&lt;/i&gt;, the Supreme Court provides a       definition of hate speech that includes the following statements:&lt;/p&gt;
&lt;blockquote style="text-align: justify; "&gt;
&lt;p&gt;“Hate speech is an effort to marginalise individuals based on         their membership in a group. Using expression that exposes the         group to hatred, hate speech seeks to delegitimise group members         in the eyes of the majority, reducing their social standing and         acceptance within society. Hate speech, therefore, rises beyond         causing distress to individual group members..[and] lays the         groundwork for later, broad attacks on vulnerable groups that can range         from discrimination, to ostracism, segregation, deportation,         violence and, in the most extreme cases, to genocide. Hate         speech also impacts a protected group’s ability to respond to         the substantive ideas under debate, thereby placing a serious         barrier to their full participation in our democracy.”&lt;/p&gt;
&lt;/blockquote&gt;
&lt;p style="text-align: justify; "&gt;Thus, it is evident that the Supreme Court itself clearly states       that hate speech must be viewed through the lens of the right to       equality, and relates to speech not merely offensive or hurtful to       specific individuals, but also inciting discrimination or violence       on the basis of inclusion of individuals within certain groups. It       is important to note that it is the consequence of speech that is       the determinative factor in interpreting hate speech, more so than       even perhaps the content of the speech. This is also broadly       reflected in the Law Commission’s report that identifies the       status of the author of the speech, the status of victims of the       speech, the potential impact of the speech and whether it amounts       to incitement as key identifying criteria of hate speech.&lt;/p&gt;
&lt;p style="text-align: justify; "&gt;However, in the commission’s recommendations, these principles       are not fairly represented in the suggested new Sections 153C and       505A, as per a &lt;a title="draft released" href="https://internetfreedom.in/government-committee-wants-to-bring-back-section-66a/" rel="external nofollow" target="_blank"&gt;&lt;span&gt;draft         released&lt;/span&gt;&lt;/a&gt; by the Internet Freedom Foundation. Section 505A,       for instance, refers to “highly disparaging, indecent, abusive,       inflammatory, false or grossly offensive information” and       “derogatory information.” These are extremely broad terms, not       having any guiding jurisprudence within Indian or international       law, which may be helpful in restrictively interpreting them. It       is important to note the similarities between this provision and       the repealed Section 66A of the Information Technology Act, which       sought to criminalise speech that was “grossly offensive,” having       “menacing character,” or “causing       annoyance..danger..insult..enmity, hatred or ill will.”&lt;/p&gt;
&lt;p style="text-align: justify; "&gt;These terms in the recommended Section 505A also run foul of the       observations of Justice Nariman in &lt;em&gt;&lt;a title="Shreya
          Singhal v. Union of India" href="https://cis-india.org/internet-governance/blog/shreya-singhal-judgment.pdf" rel="external nofollow" target="_blank"&gt;&lt;span&gt;Shreya Singhal v. Union of India&lt;/span&gt;&lt;/a&gt;,&lt;/em&gt; where       he took exception to the nature of the terms in Section 66A by       stating that, “Information that may be grossly offensive or which       causes annoyance or inconvenience are undefined terms which take       into the net a very large amount of protected and innocent       speech.” While these terms are somewhat tempered in this provision       with a requirement to show intent to “cause fear of injury or       alarm,” they remain exceedingly broad and contrary to the       requirement that restrictions on speech must be couched in the       narrowest possible terms.&lt;/p&gt;
&lt;p style="text-align: justify; "&gt;The T.K. Viswanathan committee, in addition, seeks to bring,       within the scope of the prospective Sections 153C and 505A,       electronic speech. As per its recommendations, ‘means of       communication’ would include “any words either spoken or written,       signs, visible representations, information, audio, video or       combination of both transmitted, retransmitted or sent through any       telecommunication service, communication device or computer       resource.” This could have the impact of bringing in a provision       that has some similar effects as that of the now defunct Section       66A of the Information Technology Act. The lack of regard for the       Supreme Court’s observations on hate speech, the need to look at       it through the lens of equality and the over-broadness of       restrictions on speech are likely to be dangerous for free speech       if the recommendations of this committee are acted upon.&lt;/p&gt;
        &lt;p&gt;
        For more details visit &lt;a href='https://cis-india.org/internet-governance/blog/the-wire-amber-sinha-'&gt;https://cis-india.org/internet-governance/blog/the-wire-amber-sinha-&lt;/a&gt;
        &lt;/p&gt;
    </description>
    <dc:publisher>No publisher</dc:publisher>
    <dc:creator>amber</dc:creator>
    <dc:rights></dc:rights>

    
        <dc:subject>Hate Speech</dc:subject>
    
    
        <dc:subject>Internet Governance</dc:subject>
    

   <dc:date>2018-01-02T03:06:18Z</dc:date>
   <dc:type>Blog Entry</dc:type>
   </item>


    <item rdf:about="https://cis-india.org/internet-governance/blog/it-for-change-amber-sinha-regulating-sexist-online-harassment">
    <title>Regulating Sexist Online Harassment as a Form of Censorship</title>
    <link>https://cis-india.org/internet-governance/blog/it-for-change-amber-sinha-regulating-sexist-online-harassment</link>
    <description>
        &lt;b&gt;This paper is part of a series under IT for Change’s project, Recognize, Resist, Remedy: Combating Sexist Hate Speech Online. The series, titled Rethinking Legal-Institutional Approaches to Sexist Hate Speech in India, aims to create a space for civil society actors to proactively engage in the remaking of online governance, bringing together inputs from legal scholars, practitioners, and activists. The papers reflect upon the issue of online sexism and misogyny, proposing recommendations for appropriate legal-institutional responses. The series is funded by EdelGive Foundation, India and International Development Research Centre, Canada.&lt;/b&gt;
        &lt;p&gt;&lt;span&gt;Introduction&lt;/span&gt;&lt;/p&gt;
&lt;p style="text-align: justify; "&gt;The proliferation of internet use was expected to facilitate greater online participation of women and &lt;a class="external-link" href="https://ssrn.com/abstract=2039116"&gt;other marginalised groups&lt;/a&gt;.  However, over the past few years, as more and more people have come online, it is evident that social power in online spaces mirrors offline hierarchies. While identity and security thefts may be universal experiences, women and the LGBTQ+ community continue to face barriers to safety that men often do not, aside from structural barriers to access. Sexist harassment pervades the online experience of women, be it on dating sites, &lt;a class="external-link" href="https://academic.oup.com/bjc/article/57/6/1462/2623986"&gt;online forums, or social media&lt;/a&gt;.&lt;/p&gt;
&lt;p style="text-align: justify; "&gt;In her book, &lt;i&gt;&lt;a class="external-link" href="https://yalebooks.yale.edu/book/9780300215120/twitter-and-tear-gas"&gt;Twitter and Tear Gas: The Power and Fragility of Networked Protest&lt;/a&gt;&lt;/i&gt;, Zeynep Tufekci argues that the nature and impact of censorship on social media are very different. Earlier, censorship was enacted by restricting speech. But now, it also works in the form of organised harassment campaigns, which use the qualities of viral outrage to impose a disproportionate cost on the very act of speaking out. Therefore, censorship plays out not merely in the form of the removal of speech but through disinformation and hate speech campaigns.&lt;/p&gt;
&lt;p style="text-align: justify; "&gt;In most cases, this censorship of content does not necessarily meet the threshold of hate speech, and free speech advocates have traditionally argued for counter speech as the most effective response to such speech acts. However, the structural and organised nature of harassment and extreme speech often renders counter speech ineffective. This paper will explore the nature of online sexist hate and extreme speech as a mode of censorship. Online sexualised harassment takes various forms including doxxing, cyberbullying, stalking, identity theft, incitement to violence, etc. While there are some regulatory mechanisms – either in law, or in the form of community guidelines that address them, this paper argues for the need to evolve a composite framework that looks at the impact of such censorious acts on online speech and regulatory strategies to address them.&lt;/p&gt;
&lt;hr /&gt;
&lt;p style="text-align: justify; "&gt;&lt;a href="https://cis-india.org/internet-governance/files/it-for-change-february-2021-amber-sinha-regulating-sexist-online-harassment.pdf/at_download/file" class="external-link"&gt;Click on to read the full text&lt;/a&gt; [PDF; 495 Kb]&lt;/p&gt;
        &lt;p&gt;
        For more details visit &lt;a href='https://cis-india.org/internet-governance/blog/it-for-change-amber-sinha-regulating-sexist-online-harassment'&gt;https://cis-india.org/internet-governance/blog/it-for-change-amber-sinha-regulating-sexist-online-harassment&lt;/a&gt;
        &lt;/p&gt;
    </description>
    <dc:publisher>No publisher</dc:publisher>
    <dc:creator>amber</dc:creator>
    <dc:rights></dc:rights>

    
        <dc:subject>Freedom of Speech and Expression</dc:subject>
    
    
        <dc:subject>Internet Governance</dc:subject>
    
    
        <dc:subject>Censorship</dc:subject>
    

   <dc:date>2021-05-31T09:56:31Z</dc:date>
   <dc:type>Blog Entry</dc:type>
   </item>


    <item rdf:about="https://cis-india.org/internet-governance/blog/digital-policy-portal-july-13-2016-new-approaches-to-information-privacy-revisiting-the-purpose-limitation-principle">
    <title>New Approaches to Information Privacy – Revisiting the Purpose Limitation Principle</title>
    <link>https://cis-india.org/internet-governance/blog/digital-policy-portal-july-13-2016-new-approaches-to-information-privacy-revisiting-the-purpose-limitation-principle</link>
    <description>
        &lt;b&gt;Article on Aadhaar throwing light on privacy and data protection.&lt;/b&gt;
        
&lt;p&gt;&amp;nbsp;&lt;/p&gt;
&lt;p&gt;This was &lt;a class="external-link" href="http://www.digitalpolicy.org/revisiting-the-principles-of-purpose-limitation-under-existing-data-protection-norms/"&gt;published in Digital Policy Portal&lt;/a&gt; on July 13, 2016.&lt;/p&gt;
&lt;hr /&gt;
&lt;h3&gt;Introduction&lt;/h3&gt;
&lt;p style="text-align: justify;"&gt;Last year, Mukul Rohatgi, the Attorney General of India, called into question existing jurisprudence of the last 50 years on the constitutional validity of the right to privacy.&lt;sup&gt;1&lt;/sup&gt; Mohatgi was rebutting the arguments on privacy made against Aadhaar, the unique identity project initiated and implemented in the country without any legislative mandate.&lt;sup&gt;2&lt;/sup&gt; The question of the right to privacy becomes all the more relevant in the context of events over the last few years—among them, the significant rise in data collection by the state through various e-governance schemes,&lt;sup&gt;3&lt;/sup&gt; systematic access to personal data by various wings of the state through a host of surveillance and law enforcement initiatives launched in the last decade,&lt;sup&gt;4&lt;/sup&gt; the multifold increase in the number of Indians online, and the ubiquitous collection of personal data by private parties.&lt;sup&gt;5&lt;/sup&gt;&lt;/p&gt;
&lt;p style="text-align: justify;"&gt;These developments have led to a call for a comprehensive privacy legislation in India and the adoption of the National Privacy Principles as laid down by the Expert Committee led by Justice AP Shah.&lt;sup&gt;6&lt;/sup&gt; There are privacy-protection legislation currently in place such as the Information Technology Act, 2000 (IT Act), which was enacted to govern digital content and communication and provide legal recognition to electronic transactions. This legislation has provisions that can safeguard—and dilute—online privacy. At the heart of the data protection provisions in the IT Act lies section 43A and the rules framed under it, i.e., Reasonable security practices and procedures and sensitive personal data information.&lt;sup&gt;7&lt;/sup&gt;Section 43A mandates that body corporates who receive, possess, store, deal, or handle any personal data to implement and maintain ‘reasonable security practices’, failing which, they are held liable to compensate those affected. Rules drafted under this provision also mandated a number of data protection obligations on corporations such the need to seek consent before collection, specifying the purposes of data collection, and restricting the use of data to such purposes only. There have been questions raised about the validity of the Section 43A Rules as they seek to do much more than mandate in the parent provisions, Section 43A— requiring entities to maintain reasonable security practices.&lt;/p&gt;
&lt;h3&gt;Privacy as control?&lt;/h3&gt;
&lt;p style="text-align: justify;"&gt;Even setting aside the issue of legal validity, the kind of data protection framework envisioned by Section 43A rules is proving to be outdated in the context of how data is now being collected and processed. The focus of Section 43 A Rules—as well as that of draft privacy legislations in India&lt;sup&gt;8&lt;/sup&gt;—is based on the idea of individual control. Most apt is Alan Westin’s definition of privacy: “the claim of individuals, groups, or institutions to determine for themselves when, how, and to what extent information about them is communicated to other.”&lt;sup&gt;9&lt;/sup&gt; Westin and his followers rely on the normative idea of “informational self- determination”, the notion of a pure, disembodied, and atomistic self, capable of making rational and isolated choices in order to assert complete control over personal information. More and more this has proved to be a fiction especially in a networked society.&lt;/p&gt;
&lt;p style="text-align: justify;"&gt;Much before the need for governance of information technologies had reached a critical mass in India, Western countries were already dealing with the implications of the use of these technologies on personal data. In 1973, the US Department of Health, Education and Welfare appointed a committee to address this issue, leading to a report called ‘Records, Computers and Rights of Citizens.’&lt;sup&gt;10&lt;/sup&gt; The Committee’s mandate was to “explore the impact of computers on record keeping about individuals and, in addition, to inquire into, and make recommendations regarding, the use of the Social Security number.” The Report articulated five principles which were to be the basis of fair information practices: transparency; use limitation; access and correction; data quality; and security. Building upon these principles, the Committee of Ministers of the Organization for Economic Cooperation and Development (OECD) arrived at the Guidelines on the Protection of Privacy and Transborder Flows of Personal Data in 1980.&lt;sup&gt;11&lt;/sup&gt; These principles— Collection Limitation, Data Quality, Purpose Specification, Use Limitation, Security Safeguards, Openness, Individual Participation and Accountability—are what inform most data protection regulations today including the APEC Framework, the EU Data Protection Directive, and the Section 43A Rules and Justice AP Shah Principles in India.&lt;/p&gt;
&lt;p&gt;Fred Cate describes the import of these privacy regimes as such:&lt;/p&gt;
&lt;p style="text-align: justify;"&gt;“All of these data protection instruments reflect the same approach: tell individuals what data you wish to collect or use, give them a choice, grant them access, secure those data with appropriate technologies and procedures, and be subject to third-party enforcement if you fail to comply with these requirements or individuals’ expressed preferences”&lt;sup&gt;12&lt;/sup&gt;&lt;/p&gt;
&lt;p style="text-align: justify;"&gt;This is in line with Alan Westin’s idea of privacy exercised through individual control. Therefore the focus of these principles is on empowering the individuals to exercise choice, but not on protecting individuals from harmful or unnecessary practices of data collection and processing. The author of this article has earlier written&lt;sup&gt;13&lt;/sup&gt; about the sheer inefficacy of this framework which places the responsibility on individuals. Other scholars like Daniel Solove,&lt;sup&gt;14&lt;/sup&gt; Jonathan Obar&lt;sup&gt;15&lt;/sup&gt; and Fred Cate&lt;sup&gt;16&lt;/sup&gt; have also written about the failure of traditional data protection practices of notice and consent. While these essays dealt with the privacy principles of choice and informed consent, this paper will focus on the principles of purpose limitation.&lt;/p&gt;
&lt;h3&gt;Purpose Limitation and Impact of Big Data&lt;/h3&gt;
&lt;p&gt;The principles of purpose limitation or purpose specification seek to ensure the following four objectives:&lt;/p&gt;
&lt;ol style="list-style-type: lower-alpha;"&gt;
&lt;li&gt;Personal information collected and processed should be adequate and relevant to the purposes for which they are processed.&lt;/li&gt;
&lt;li&gt;The entities collect, process, disclose, make available, or otherwise use personal information only for the stated purposes.&lt;/li&gt;
&lt;li&gt;In case of change in purpose, the data’s subject needs to be informed and their consent has to be obtained.&lt;/li&gt;
&lt;li&gt;After personal information has been used in accordance with the identified purpose, it has to be destroyed as per the identified procedures.&lt;/li&gt;&lt;/ol&gt;
&lt;p style="text-align: justify;"&gt;The purpose limitation along with the data minimisation principle—which requires that no more data may be processed than is necessary for the stated purpose—aim to limit the use of data to what is agreed to by the data subject. These principles are in direct conflict with new technology which relies on ubiquitous collection and indiscriminate uses of data. The main import of Big Data technologies on the inherent value in data which can be harvested not by the primary purposes of data collection but through various secondary purposes which involve processing of the data repeatedly.&lt;sup&gt;17&lt;/sup&gt;Further, instead to destroying the data when its purpose has been achieved, the intent is to retain as much data as possible for secondary uses. Importantly, as these secondary uses are of an inherently unanticipated nature, it becomes impossible to account for it at the stage of collection and providing the choice to the data subject.&lt;/p&gt;
&lt;p style="text-align: justify;"&gt;Followers of the discourse on Big Data would be well aware of its potential impacts on privacy. De-identification techniques to protect the identities of individuals in dataset face a threat from an increase in the amount of data available either publicly or otherwise to a party seeking to reverse-engineer an anonymised dataset to re-identify individuals. &lt;sup&gt;18&lt;/sup&gt; Further, Big Data analytics promise to find patterns and connections that can contribute to the knowledge available to the public to make decisions. What is also likely is that it will lead to revealing insights about people that they would have preferred to keep private.&lt;sup&gt;19&lt;/sup&gt;In turn, as people become more aware of being constantly profiled by their actions, they will self-regulate and ‘discipline’ their behaviour. This can lead to a chilling effect.&lt;sup&gt;20&lt;/sup&gt; Meanwhile, Big Data is also fuelling an industry that incentivises businesses to collect more data, as it has a high and growing monetary value. However, Big Data also promises a completely new kind of knowledge that can prove to be revolutionary in fields as diverse as medicine, disaster-management, governance, agriculture, transport, service delivery, and decision-making.&lt;sup&gt;21&lt;/sup&gt; As long as there is a sufficiently large and diverse amount of data, there could be invaluable insights locked in it, accessing which can provide solutions to a number of problems. In light of this, it is important to consider what kind of regulatory framework is most suitable which could facilitate some of the promised benefits of Big Data and at the same time mitigate its potential harm. This, coupled with the fact that the existing data protection principles have, by most accounts, run their course, makes the examination of alternative frameworks even more important. 
This article will examine some alternate proposals made to the existing framework of purpose limitation below.&lt;/p&gt;
&lt;h3&gt;Harms-based approach&lt;/h3&gt;
&lt;p style="text-align: justify;"&gt;Some scholars like Fred Cate&lt;sup&gt;22&lt;/sup&gt; and Daniel Solove&lt;sup&gt;23&lt;/sup&gt; have argued that there is a need for the primary focus of data protection law to move from control at the stage of data collection to actual use cases. In his article on the failure of Fair Information Practice Principles,&lt;sup&gt;24&lt;/sup&gt;Cate puts forth a proposal for ‘Consumer Privacy Protection Principles.’ Cate envisions a more interventionist role of the data protection authorities by regulating information flows when required, in order to protect individuals from risky or harmful uses of information. Cate’s attempt is to extend the principles of consumer protection law of prevention and remedy of harms.&lt;/p&gt;
&lt;p style="text-align: justify;"&gt;In a re-examination of the OECD Privacy Principles, Cate and Viktor Mayer Schöemberger attempt to discard the use of personal data to only purposes specified. They felt that restricting the use of personal to only specified purposes could significantly threaten various research and beneficial uses of Big Data. Instead of articulating a positive obligations of what personal data collected could be used for, they attempt to arrive at a negative obligation of use-cases prevented by law. Their working definition of the Use specification principle broaden the scope of use cases by only preventing use of data “if the use is fraudulent, unlawful, deceptive or discriminatory; society has deemed the use inappropriate through a standard of unfairness; the use is likely to cause unjustified harm to the individual; or the use is over the well-founded objection of the individual, unless necessary to serve an over-riding public interest, or unless required by law.”&lt;sup&gt;25&lt;/sup&gt;&lt;/p&gt;
&lt;p style="text-align: justify;"&gt;While most standards in the above definition have established understanding in jurisprudence, the concept of unjustifiable harm is what we are interested in. Any theory of harms-based approach goes back to John Stuart Mill’s dictum that the only justifiable purpose to exert power over the will of an individual is to prevent harm to others. Therefore, any regulation that seeks to control or prevent autonomy of individuals (in this case, the ability of individuals to allow data collectors to use their personal data, and the ability of data collectors to do so, without any limitation) must clearly demonstrate the harm to the individuals in question.&lt;/p&gt;
&lt;p&gt;Fred Cate articulates the following steps to identify tangible harm and respond to its presence:&lt;sup&gt;26&lt;/sup&gt;&lt;/p&gt;
&lt;ol style="list-style-type: lower-alpha;"&gt;
&lt;li&gt;Focus on Use — Actual use of the data should be considered, not mere possession. The assumption is that the collection, possession, or transfer of information do not significantly harm people, rather it is the use of information following such collection, possession, or transfer.&lt;/li&gt;
&lt;li&gt;Proportionality — Any regulatory measure must be proportional to the likelihood and severity of the harm identified.&lt;/li&gt;
&lt;li&gt;Per se Harmful Uses — Uses which are always harmful must be prohibited by law&lt;/li&gt;
&lt;li&gt;Per se not Harmful Uses — If uses can be considered inherently not harmful, they should not be regulated.&lt;/li&gt;
&lt;li&gt;Sensitive Uses — In cases where the uses are neither per se harmful nor per se not harmful, individual consent must be sought for using that data for those purposes.&lt;/li&gt;&lt;/ol&gt;
&lt;p style="text-align: justify;"&gt;The proposal by Cate argues for what is called a ‘use based system’, which is extremely popular with American scholars. Under this system, data collection itself is not subject to restrictions; rather, only the use of data is regulated. This argument has great appeal for both businesses who can reduce their overheads significantly if consent obligations are done away with as long as they use the data in ways which are not harmful, as well as critics of the current data protection framework which relies on informed consent. Lokke Moerel explains the philosophy of ‘harms based approach’ or ‘use based system’ in United States by juxtaposing it against the ‘rights based approach’ in Europe.&lt;sup&gt;27&lt;/sup&gt; In Europe, rights of individuals with regard to processing of their personal data is a fundamental human right and therefore, a precautionary principle is followed with much greater top-down control upon data collection. However, in the United States, there is a far greater reliance on market mechanisms and self-regulating organisations to check inappropriate processing activities, and government intervention is limited to cases where a clear harm is demonstrable.&lt;sup&gt;28&lt;/sup&gt;&lt;/p&gt;
&lt;p style="text-align: justify;"&gt;Continuing research by the Centre for Information Policy Leadership under its Privacy Risk Framework Project looks at a system of articulating what harms and risks arising from use of collected data. They have arrived a matrix of threats and harms. Threats are categorised as —a) inappropriate use of personal information and b) personal information in the wrong hands. More importantly for our purposes, harms are divided into: a) tangible harms which are physical or economic in nature (bodily harm, loss of liberty, damage to earning power and economic interests); b) intangible harms which can be demonstrated (chilling effects, reputational harm, detriment from surveillance, discrimination and intrusion into private life); and c) societal harm (damage to democratic institutions and loss of social trust).&lt;sup&gt;29&lt;/sup&gt;For any harms-based system, a matrix like above needs to emerge clearly so that regulation can focus on mitigating practices leading to the harms.&lt;/p&gt;
&lt;h3&gt;Legitimate interests&lt;/h3&gt;
&lt;p style="text-align: justify;"&gt;Lokke Moerel and Corien Prins, in their article “Privacy for Homo Digitalis – Proposal for a new regulatory framework for data protection in the light of Big Data and Internet of Things”&lt;sup&gt;30&lt;/sup&gt; use the ideal of responsive regulation which considers empirically observable practices and institutions while determining the regulation and enforcement required. They state that current data protection frameworks—which rely on mandating some principles of how data has to be processed—is exercised through merely procedural notification and consent requirements. Further, Moerel and Prins feel that data protection law cannot only involve a consideration of individual interest but also needs to take into account collective interest. Therefore, the test must be a broader assessment than merely the purpose limitation articulating the interests of the parties directly involved, but whether a legitimate interest is achieved.&lt;/p&gt;
&lt;p style="text-align: justify;"&gt;Legitimate interest has been put forth as an alternative to the purpose limitation. Legitimate is not a new concept and has been a part of the EU Data Protection Directive and also finds a place in the new General Data Protection Regulation. Article 7 (f) of the EU Directive&lt;sup&gt;31&lt;/sup&gt; provided for legitimate interest balanced against the interests or fundamental rights and freedoms of the data subject as the last justifiable reason for use of data. Due to confusion in its interpretation, the Article 29 Working Party, in 2014,&lt;sup&gt;32&lt;/sup&gt;looked into the role of legitimate interest and arrived at the following factors to determine the presence of a legitimate interest— a) the status of the individual (employee, consumer, patient) and the controller (employer, company in a dominant position, healthcare service); b) the circumstances surrounding the data processing (contract relationship of data subject and processor); c) the legitimate expectations of the individual.&lt;/p&gt;
&lt;p style="text-align: justify;"&gt;Federico Ferretti has criticised the legitimate interest principle as vague and ambiguous. The balancing of legitimate interest in using the data against fundamental rights and freedoms of the data subject gives the data controllers some degree of flexibility in determining whether data may be processed; however, this also reduces the legal certainty that data subject have of their data not being used for purposes they have not agreed to.&lt;sup&gt;33&lt;/sup&gt;However, it is this paper’s contention that it is not the intent of the legitimate interest criteria but the lack of consensus on its application which creates an ambiguity. Moerel and Prins articulate a test for using legitimate interest which is cognizant of the need to use data for the purpose of Big Data processing, as well as ensuring that the rights of data subjects are not harmed.&lt;/p&gt;
&lt;p style="text-align: justify;"&gt;As demonstrated earlier, the processing of data and its underlying purposes have become exceedingly complex and the conventional tool to describe these processes ‘privacy notices’ are too lengthy, too complex and too profuse in numbers to have any meaningful impact.&lt;sup&gt;34&lt;/sup&gt;The idea of information self-determination, as contemplated by Westin in American jurisprudence, is not achieved under the current framework. Moerel and Prins recommend five factors&lt;sup&gt;35&lt;/sup&gt; as relevant in determining the legitimate interest. Of the five, the following three are relevant to the present discussion:&lt;/p&gt;
&lt;ol style="list-style-type: lower-alpha;"&gt;
&lt;li style="text-align: justify;"&gt;Collective Interest — A cost-benefit analysis should be conducted, which examines the implications for privacy for the data subjects as well as the society, as a whole.&lt;/li&gt;
&lt;li style="text-align: justify;"&gt;The nature of the data — Rather than having specific categories of data, the nature of data needs to be assessed contextually to determine legitimate interest.&lt;/li&gt;
&lt;li style="text-align: justify;"&gt;Contractual relationship and consent not independent grounds — This test has two parts. First, in case of contractual relationship between data subject and data controller: the more specific the contractual relationship, the more restrictions apply to the use of the data. Second, consent does not function as a separate principle which, once satisfied, need not be revisited. The nature of the consent (opportunities made available to data subject, opt in/opt out, and others) will continue to play a role in determining legitimate interest.&lt;/li&gt;&lt;/ol&gt;
&lt;h3&gt;Conclusion&lt;/h3&gt;
&lt;p style="text-align: justify;"&gt;Replacing the purpose limitation principles with a use-based system as articulated above poses the danger of allowing governments and the private sector to carry out indiscriminate data collection under the blanket guise that any and all data may be of some use in the future. The harms-based approach has many merits and there is a stark need for more use of risk assessments techniques and privacy impact assessments in data governance. However, it is important that it merely adds to the existing controls imposed at data collection, and not replace them in their entirety. On the other hand, the legitimate interests principle, especially as put forth by Moerel and Prins, is more cognizant of the different factors at play — the inefficacy of existing purpose limitation principles, the need for businesses to use data for purposes unidentified at the stage of collection, and the need to ensure that it is not misused for indiscriminate collection and purposes. However, it also poses a much heavier burden on data controllers to take into account various factors before determining legitimate interest. If legitimate interest has to emerge as a realistic alternative to purpose limitation, there needs to be greater clarity on how data controllers must apply this principle.&lt;/p&gt;
&lt;h3&gt;Endnotes&lt;/h3&gt;
&lt;ol&gt;
&lt;li style="text-align: justify;"&gt;Prachi Shrivastava, “Privacy not a fundamental right, argues Mukul Rohatgi for Govt as Govt affidavit says otherwise,” Legally India, Jyly 23, 2015, http://www.legallyindia.com/Constitutional-law/privacy-not-a-fundamental-right-argues-mukul-rohatgi-for-govt-as-govt-affidavit-says-otherwise.&lt;/li&gt;
&lt;li style="text-align: justify;"&gt; Rebecca Bowe, “Growing Mistrust of India’s Biometric ID Scheme,” Electronic Frontier Foundation, May 4, 2012, https://www.eff.org/deeplinks/2012/05/growing-mistrust-india-biometric-id-scheme.&lt;/li&gt;
&lt;li style="text-align: justify;"&gt;Lisa Hayes, “Digital India’s Impact on Privacy: Aadhaar numbers, biometrics, and more,” Centre for Democracy and Technology, January 20, 2015, https://cdt.org/blog/digital-indias-impact-on-privacy-aadhaar-numbers-biometrics-and-more/.&lt;/li&gt;
&lt;li style="text-align: justify;"&gt;“India’s Surveillance State,” Software Freedom Law Centre, http://sflc.in/indias-surveillance-state-our-report-on-communications-surveillance-in-india/.&lt;/li&gt;
&lt;li&gt;“Internet Privacy in India,” Centre for Internet and Society, http://cis-india.org/telecom/knowledge-repository-on-internet-access/internet-privacy-in-india.&lt;/li&gt;
&lt;li style="text-align: justify;"&gt;Vivek Pai, “Indian Government says it is still drafting privacy law, but doesn’t give timelines,” Medianama, May 4, 2016, http://www.medianama.com/2016/05/223-government-privacy-draft-policy/.&lt;/li&gt;
&lt;li&gt;Information Technology (Intermediaries Guidelines) Rules, 2011,&lt;br /&gt; http://deity.gov.in/sites/upload_files/dit/files/GSR314E_10511%281%29.pdf.&lt;/li&gt;
&lt;li style="text-align: justify;"&gt;Discussion Points for the Meeting to be taken by Home Secretary at 2:30 pm on 7-10-11 to discuss the drat Privacy Bill, http://cis-india.org/internet-governance/draft-bill-on-right-to-privacy.&lt;/li&gt;
&lt;li&gt;Alan Westin, Privacy and Freedom (New York: Atheneum, 2015).&lt;/li&gt;
&lt;li style="text-align: justify;"&gt;US Secretary’s Advisory Committee on Automated Personal Data Systems, Records, Computers and the Rights of Citizens, http://www.justice.gov/opcl/docs/rec-com-rights.pdf.&lt;/li&gt;
&lt;li&gt;OECD Guidelines on the Protection of Privacy and Transborder Flows of Personal Data, http://www.oecd.org/sti/ieconomy/oecdguidelinesontheprotectionofprivacyandtransborderflowsofpersonaldata.htm&lt;/li&gt;
&lt;li style="text-align: justify;"&gt;Fred Cate, “The Failure of Information Practice Principles,” in Consumer Protection in the Age of the Information Economy, ed. Jane K. Winn (Burlington: Aldershot, Hants, England, 2006) http://papers.ssrn.com/sol3/papers.cfm?abstract_id=1156972.&lt;/li&gt;
&lt;li style="text-align: justify;"&gt;Amber Sinha and Scott Mason, “A Critique of Consent in Informational Privacy,” Centre for Internet and Society, January 11, 2016, http://cis-india.org/internet-governance/blog/a-critique-of-consent-in-information-privacy.&lt;/li&gt;
&lt;li&gt;Daniel Solove, “Privacy self-management and consent dilemma,” Harvard Law Review 126, (2013): 1880.&lt;/li&gt;
&lt;li style="text-align: justify;"&gt;Jonathan Obar, “Big Data and the Phantom Public: Walter Lippmann and the fallacy of data privacy self management,” Big Data and Society 2(2), (2015), doi: 10.1177/2053951715608876.&lt;/li&gt;
&lt;li&gt;Supra Note 12.&lt;/li&gt;
&lt;li&gt;Supra Note 14.&lt;/li&gt;
&lt;li style="text-align: justify;"&gt;Paul Ohm, “Broken Promises of Privacy: Responding to the Surprising Failure of Anonymization” available at http://papers.ssrn.com/sol3/papers.cfm?abstract_id=1450006; Arvind Narayanan and Vitaly Shmatikov, “Robust De-anonymization of Large Sparse Datasets” available at https://www.cs.utexas.edu/~shmat/shmat_oak08netflix.pdf.&lt;/li&gt;
&lt;li style="text-align: justify;"&gt;D. Hirsch, “That’s Unfair! Or is it? Big Data, Discrimination and the FTC’s Unfairness Authority,” Kentucky Law Journal, Vol. 103, available at: http://www.kentuckylawjournal.org/wp-content/uploads/2015/02/103KyLJ345.pdf&lt;/li&gt;
&lt;li style="text-align: justify;"&gt;A Marthews and C Tucker, “Government Surveillance and Internet Search Behavior”, available at http://ssrn.com/abstract=2412564; Danah Boyd and Kate Crawford, “Critical Questions for Big Data: Provocations for a cultural, technological, and scholarly phenomenon”, Information, Communication &amp;amp; Society, Vol. 15, Issue 5, (2012).&lt;/li&gt;
&lt;li style="text-align: justify;"&gt;Scott Mason, “Benefits and Harms of Big Data”, Centre for Internet and Society, available at http://cis-india.org/internet-governance/blog/benefits-and-harms-of-big-data#_ftn37.&lt;/li&gt;
&lt;li&gt;Cate, “The Failure of Information Practice Principles.”&lt;/li&gt;
&lt;li&gt;Solove, “Privacy self-management and consent dilemma,” 1882.&lt;/li&gt;
&lt;li&gt;Cate, “The Failure of Information Practice Principles.”&lt;/li&gt;
&lt;li&gt;Fred Cate and Viktor Mayer-Schönberger, “Notice and Consent in a world of Big Data,” International Data Privacy Law 3(2), (2013): 69.&lt;/li&gt;
&lt;li&gt;Solove, “Privacy self-management and consent dilemma,” 1883.&lt;/li&gt;
&lt;li&gt;Lokke Moerel, “Netherlands: Big Data Protection: How To Make The Draft EU Regulation On Data Protection Future Proof”, Mondaq, March 11. 2014, http://www.mondaq.com/x/298416/data+protection/Big+Data+Protection+How+To+Make+The+Dra%20ft+EU+Regulation+On+Data+Protection+Future+Proof%20al%20Lecture.&lt;/li&gt;
&lt;li&gt;Moerel, “Netherlands: Big Data Protection.”&lt;/li&gt;
&lt;li&gt;Centre for Information Policy Leadership, “A Risk-based Approach to Privacy: Improving Effectiveness in Practice,” Hunton and Williams LLP, June 19, 2014, https://www.informationpolicycentre.com/uploads/5/7/1/0/57104281/white_paper_1-a_risk_based_approach_to_privacy_improving_effectiveness_in_practice.pdf.&lt;/li&gt;
&lt;li&gt;Lokke Moerel and Corien Prins, “Privacy for Homo Digitalis: Proposal for a new regulatory framework for data protection in the light of Big Data and Internet of Things”, Social Science Research Network, May 25, 2016, http://papers.ssrn.com/sol3/papers.cfm?abstract_id=2784123.&lt;/li&gt;
&lt;li&gt;EU Directive 95/46/EC – The Data Protection Directive, https://www.dataprotection.ie/docs/EU-Directive-95-46-EC-Chapter-2/93.htm.&lt;/li&gt;
&lt;li&gt;Article 29 Data Protection Working Party, “Opinion 06/2014 on the notion of legitimate interests of the data controller under Article 7 of Directive 95/46/EC,” http://ec.europa.eu/justice/data-protection/article-29/documentation/opinion-recommendation/files/2014/wp217_en.pdf.&lt;/li&gt;
&lt;li&gt;Federico Ferretti, “Data protection and the legitimate interest of data controllers: Much ado about nothing or the winter of rights?,” Common Market Law Review 51(2014): 1-26. http://bura.brunel.ac.uk/bitstream/2438/9724/1/Fulltext.pdf.&lt;/li&gt;
&lt;li&gt;Sinha and Mason, “A Critique of Consent in Informational Privacy.”&lt;/li&gt;
&lt;li&gt;Moerel and Prins, “Privacy for Homo Digitalis.”&lt;/li&gt;&lt;/ol&gt;

        &lt;p&gt;
        For more details visit &lt;a href='https://cis-india.org/internet-governance/blog/digital-policy-portal-july-13-2016-new-approaches-to-information-privacy-revisiting-the-purpose-limitation-principle'&gt;https://cis-india.org/internet-governance/blog/digital-policy-portal-july-13-2016-new-approaches-to-information-privacy-revisiting-the-purpose-limitation-principle&lt;/a&gt;
        &lt;/p&gt;
    </description>
    <dc:publisher>No publisher</dc:publisher>
    <dc:creator>amber</dc:creator>
    <dc:rights></dc:rights>

    
        <dc:subject>Aadhaar</dc:subject>
    
    
        <dc:subject>Internet Governance</dc:subject>
    
    
        <dc:subject>Privacy</dc:subject>
    

   <dc:date>2016-11-09T13:54:28Z</dc:date>
   <dc:type>Blog Entry</dc:type>
   </item>


    <item rdf:about="https://cis-india.org/internet-governance/blog/the-fundamental-right-to-privacy-an-analysis">
    <title>The Fundamental Right to Privacy: An Analysis</title>
    <link>https://cis-india.org/internet-governance/blog/the-fundamental-right-to-privacy-an-analysis</link>
    <description>
        &lt;b&gt;Last​ ​month’s​ ​judgment​ ​by​ ​the​ ​nine​ ​judge​ ​referral​ ​bench​ ​was​ ​an​ ​emphatic endorsement​ ​of​ ​the​ ​the​ ​constitutional​ ​right​ ​to​ ​privacy.​ ​In​ ​the​ ​course​ ​of​ ​a​ ​547​ ​page judgment,​ ​the​ ​bench​ ​affirmed​ ​the​ ​fundamental​ ​nature​ ​of​ ​the​ ​right​ ​to​ ​privacy reading​ ​it​ ​into​ ​the​ ​values​ ​of​ ​dignity​ ​and​ ​liberty.​ In the course of a few short papers, we will dissect the various aspects of the right to privacy as put forth by the nine judge constitutional bench in the Puttaswamy matter. The papers will focus on the sources, structure, scope, breadth, and future of privacy. Here are the first three papers, authored by Amber Sinha and edited by Elonnai Hickok.


&lt;/b&gt;
        
&lt;p&gt;&amp;nbsp;&lt;/p&gt;
&lt;h3&gt;&lt;strong&gt;The​ ​Fundamental​ ​Right​ ​to​ ​Privacy - Part​ ​I:​ ​Sources&lt;/strong&gt;&lt;/h3&gt;
&lt;p&gt;Much​ ​of​ ​the​ ​debate​ ​and​ ​discussion​ ​in​ ​the​ ​hearings​ ​before​ ​the​ ​constitutional​ ​bench was​ ​regarding​ ​where​ ​in​ ​the​ ​Constitution​ ​a​ ​right​ ​to​ ​privacy​ ​may​ ​be​ ​located.​ In​ ​this paper,​ ​we​ ​analyse​ ​the​ ​different​ ​provisions​ ​and​ ​tools​ ​of​ ​interpretations​ ​use​ ​by​ ​the bench​ ​to​ ​read​ ​a​ ​right​ ​to​ ​privacy​ ​in​ ​Part​ ​III​ ​of​ ​the​ ​Constitution.&lt;/p&gt;
&lt;h4&gt;Download: &lt;a href="https://cis-india.org/internet-governance/files/amber-sinha-the-fundamental-right-to-privacy-i-sources-pdf/at_download/file"&gt;PDF&lt;/a&gt;&lt;/h4&gt;
&lt;hr /&gt;
&lt;h3&gt;&lt;strong&gt;The​ ​Fundamental​ ​Right​ ​to​ ​Privacy - ​Part​ ​II:​ Structure&lt;/strong&gt;&lt;/h3&gt;
&lt;p&gt;​In​ ​the​ ​previous​ ​paper,​ ​we delved​ ​into​ ​the​ ​ ​sources​ ​in​ ​the​ ​Constitution​ ​and​ ​the​ ​interpretive​ ​tools​ ​used​ ​to​ ​locate 
the​ ​right​ ​to​ ​privacy​ ​as​ ​a​ ​constitutional​ ​right.​ ​This​ ​paper​ ​follows​ ​it​ ​up​ ​with​ ​an​ ​analysis of​ ​the​ ​structure​ ​of​ ​the​ ​right​ ​to​ ​privacy​ ​as​ ​articulated​ ​by​ ​the​ ​bench.​ ​We​ ​will​ ​look​ ​at​ ​the various​ ​facets​ ​of​ ​privacy​ ​which​ ​form​ ​a​ ​part​ ​of​ ​the​ ​fundamental​ ​right,​ ​the​ ​basis​ ​for such​ ​dimensions​ ​and​ ​what​ ​their​ ​implications​ ​may​ ​be.&lt;/p&gt;
&lt;h4&gt;Download: &lt;a href="https://cis-india.org/internet-governance/files/amber-sinha-the-fundamental-right-to-privacy-ii-structure-pdf/at_download/file"&gt;PDF&lt;/a&gt;&lt;/h4&gt;
&lt;hr /&gt;
&lt;h3&gt;&lt;strong&gt;The​ ​Fundamental​ ​Right​ ​to​ ​Privacy - Part​ ​III:​ Scope&lt;/strong&gt;&lt;/h3&gt;
&lt;p&gt;While the previous papers dealt with the sources in the Constitution and the interpretive tools used by the bench to locate the right to privacy as a constitutional right, as well as the structure of the right with its various dimensions, this paper will look at the judgment for guidance on principles to determine what the scope of the right of privacy may be.&lt;/p&gt;
&lt;h4&gt;Download: &lt;a href="https://cis-india.org/internet-governance/the200b-200bfundamental200b-200bright200b-200bto200b-200bprivacy-200b-200bpart200b-200biii-scope/at_download/file" class="external-link"&gt;PDF&lt;/a&gt;&lt;/h4&gt;
&lt;p&gt;&amp;nbsp;&lt;/p&gt;
&lt;p&gt;&amp;nbsp;&lt;/p&gt;

        &lt;p&gt;
        For more details visit &lt;a href='https://cis-india.org/internet-governance/blog/the-fundamental-right-to-privacy-an-analysis'&gt;https://cis-india.org/internet-governance/blog/the-fundamental-right-to-privacy-an-analysis&lt;/a&gt;
        &lt;/p&gt;
    </description>
    <dc:publisher>No publisher</dc:publisher>
    <dc:creator>amber</dc:creator>
    <dc:rights></dc:rights>

    
        <dc:subject>Featured</dc:subject>
    
    
        <dc:subject>Homepage</dc:subject>
    
    
        <dc:subject>Internet Governance</dc:subject>
    
    
        <dc:subject>Privacy</dc:subject>
    

   <dc:date>2017-10-04T11:19:46Z</dc:date>
   <dc:type>Blog Entry</dc:type>
   </item>


    <item rdf:about="https://cis-india.org/internet-governance/blog/regulating-sexist-online-harassment-a-model-of-online-harassment-as-a-form-of-censorship">
    <title>Regulating Sexist Online Harassment: A Model of Online Harassment as a Form of Censorship</title>
    <link>https://cis-india.org/internet-governance/blog/regulating-sexist-online-harassment-a-model-of-online-harassment-as-a-form-of-censorship</link>
    <description>
        &lt;b&gt;Amber Sinha wrote a paper on regulating sexist online harassment, and how online harassment serves as a form of censorship, for the “Recognize, Resist, Remedy: Addressing Gender-Based Hate Speech in the Online Public Sphere” project, a collaborative project between IT for Change, India and InternetLab, Brazil.&lt;/b&gt;
        
&lt;p&gt;&amp;nbsp;&lt;/p&gt;
&lt;p&gt;Read the full paper &lt;a class="external-link" href="https://itforchange.net/sites/default/files/1883/Amber-Sinha-Rethinking-Legal-Institutional-Approaches-to-Sexist-Hate-Speech-ITfC-IT-for-Change_0.pdf"&gt;here&lt;/a&gt;.&lt;/p&gt;

        &lt;p&gt;
        For more details visit &lt;a href='https://cis-india.org/internet-governance/blog/regulating-sexist-online-harassment-a-model-of-online-harassment-as-a-form-of-censorship'&gt;https://cis-india.org/internet-governance/blog/regulating-sexist-online-harassment-a-model-of-online-harassment-as-a-form-of-censorship&lt;/a&gt;
        &lt;/p&gt;
    </description>
    <dc:publisher>No publisher</dc:publisher>
    <dc:creator>amber</dc:creator>
    <dc:rights></dc:rights>


   <dc:date>2021-03-11T04:14:28Z</dc:date>
   <dc:type>Blog Entry</dc:type>
   </item>


    <item rdf:about="https://cis-india.org/internet-governance/blog/rethinking-national-privacy-principles">
    <title>Rethinking National Privacy Principles: Evaluating Principles for India's Proposed Data Protection Law</title>
    <link>https://cis-india.org/internet-governance/blog/rethinking-national-privacy-principles</link>
    <description>
        &lt;b&gt;This report is intended to be the first part in a series of white papers that CIS will publish which seeks to contribute to the discussions around the enactment of a privacy legislation in India. In subsequent pieces we will focus on subjects such as regulatory framework to implement, supervise and enforce privacy principles, and principles to regulate surveillance in India under a privacy law.&lt;/b&gt;
        &lt;p&gt;Edited by Elonnai Hickok and Vipul Kharbanda&lt;/p&gt;
&lt;hr /&gt;
&lt;p style="text-align: justify; "&gt;This analysis intends to build on the substantial work done in the formulation of the National Privacy Principles by the Committee of Experts led by Justice AP Shah.1 This brief, hopes to evaluate the National Privacy Principles and the assertion by the Committee that right to privacy be considered a fundamental right under the Indian Constitution. The national privacy principles have been revisited in light of technological developments such as big data, Internet of Things, algorithmic decision making and artificial intelligence which are increasingly playing a greater role in the collection and processing of personal data of individuals, its analysis and decisions taken on the basis of such analysis. The solutions and principles articulated in this report are intended to provide starting points for a meaningful and nuanced discussion on how we need to rethink the privacy principles that should inform the data protection law in India.&lt;/p&gt;
&lt;p style="text-align: justify; "&gt;&lt;a class="external-link" href="http://cis-india.org/internet-governance/files/rethinking-privacy-principles"&gt;Click to read the full blog post&lt;/a&gt;&lt;/p&gt;
&lt;p&gt; &lt;/p&gt;
&lt;p&gt; &lt;/p&gt;
        &lt;p&gt;
        For more details visit &lt;a href='https://cis-india.org/internet-governance/blog/rethinking-national-privacy-principles'&gt;https://cis-india.org/internet-governance/blog/rethinking-national-privacy-principles&lt;/a&gt;
        &lt;/p&gt;
    </description>
    <dc:publisher>No publisher</dc:publisher>
    <dc:creator>amber</dc:creator>
    <dc:rights></dc:rights>

    
        <dc:subject>Internet Governance</dc:subject>
    
    
        <dc:subject>Privacy</dc:subject>
    

   <dc:date>2017-09-11T02:22:01Z</dc:date>
   <dc:type>Blog Entry</dc:type>
   </item>


    <item rdf:about="https://cis-india.org/internet-governance/files/data-for-the-benefit-of-people">
    <title>Data for the Benefit of People</title>
    <link>https://cis-india.org/internet-governance/files/data-for-the-benefit-of-people</link>
    <description>
        &lt;b&gt;&lt;/b&gt;
        
        &lt;p&gt;
        For more details visit &lt;a href='https://cis-india.org/internet-governance/files/data-for-the-benefit-of-people'&gt;https://cis-india.org/internet-governance/files/data-for-the-benefit-of-people&lt;/a&gt;
        &lt;/p&gt;
    </description>
    <dc:publisher>No publisher</dc:publisher>
    <dc:creator>amber</dc:creator>
    <dc:rights></dc:rights>


   <dc:date>2018-12-01T04:21:32Z</dc:date>
   <dc:type>File</dc:type>
   </item>


    <item rdf:about="https://cis-india.org/internet-governance/technical-appendix-to-use-of-sentiment-analysis-by-law-enforcement-an-analysis-of-scrutability-for-juridical-purposes">
    <title>Technical Appendix to 'Use of sentiment analysis by law enforcement: An analysis of scrutability for juridical purposes'</title>
    <link>https://cis-india.org/internet-governance/technical-appendix-to-use-of-sentiment-analysis-by-law-enforcement-an-analysis-of-scrutability-for-juridical-purposes</link>
    <description>
        &lt;b&gt;This file contains the technical appendix to the paper titled 'Use of sentiment analysis by law enforcement: An analysis of scrutability for juridical purposes' by Dr. Hans Varghese Mathews and Amber Sinha&lt;/b&gt;
        
        &lt;p&gt;
        For more details visit &lt;a href='https://cis-india.org/internet-governance/technical-appendix-to-use-of-sentiment-analysis-by-law-enforcement-an-analysis-of-scrutability-for-juridical-purposes'&gt;https://cis-india.org/internet-governance/technical-appendix-to-use-of-sentiment-analysis-by-law-enforcement-an-analysis-of-scrutability-for-juridical-purposes&lt;/a&gt;
        &lt;/p&gt;
    </description>
    <dc:publisher>No publisher</dc:publisher>
    <dc:creator>amber</dc:creator>
    <dc:rights></dc:rights>


   <dc:date>2020-05-03T12:43:05Z</dc:date>
   <dc:type>File</dc:type>
   </item>


    <item rdf:about="https://cis-india.org/internet-governance/blog/unpacking-data-protection-law-a-visual-representation">
    <title>Unpacking Data Protection Law: A Visual Representation</title>
    <link>https://cis-india.org/internet-governance/blog/unpacking-data-protection-law-a-visual-representation</link>
    <description>
        &lt;b&gt;This visual explainer unpacking data protection law was developed by Amber Sinha (research) and Pooja Saxena (design), and published as part of the Data Privacy Week celebrations on the Privacy International blog. Join the conversation on Twitter using #dataprivacyweek.&lt;/b&gt;
        
&lt;p&gt;&amp;nbsp;&lt;/p&gt;
&lt;h4&gt;Cross-posted from &lt;a href="https://medium.com/@privacyint/unpacking-data-protection-300e51c5f9b5" target="_blank"&gt;Privacy International blog&lt;/a&gt;.&lt;/h4&gt;
&lt;h4&gt;Credits: Flag illustrations, when not created by the authors, are from &lt;a href="http://www.freepik.com/" target="_blank"&gt;Ibrandify / Freepik&lt;/a&gt;.&lt;/h4&gt;
&lt;hr /&gt;
&lt;img src="https://github.com/cis-india/website/blob/master/img/AS-PS_UnpackingDataProtectionLaw_2018_01.png?raw=true" alt="Data protection law systems are usually seen as a dichotomy between the United State of America and the European Union" width="80%" /&gt;
&lt;img src="https://github.com/cis-india/website/blob/master/img/AS-PS_UnpackingDataProtectionLaw_2018_02.png?raw=true" alt="This dichotomy is not an accurate representation of the issue. Today, close to a hundred countries follow the omnibus approach, while less than a dozen, including the US, use the sectoral approach." width="80%" /&gt;
&lt;img src="https://github.com/cis-india/website/blob/master/img/AS-PS_UnpackingDataProtectionLaw_2018_03.gif?raw=true" alt="If too many laws apply to the same actor, compliance becomes difficult. As a result, the sectoral approach to data protection is becoming less relevant." width="80%" /&gt;
&lt;img src="https://github.com/cis-india/website/blob/master/img/AS-PS_UnpackingDataProtectionLaw_2018_04.png?raw=true" alt="Data protection regulation involve interaction between regulators and industry." width="80%" /&gt;
&lt;img src="https://github.com/cis-india/website/blob/master/img/AS-PS_UnpackingDataProtectionLaw_2018_05.gif?raw=true" alt="To be an effective data protection regulator, an entire range of regulatory tools are required, which the regulator can use to reward, support and sanction." width="80%" /&gt;
&lt;p&gt;&amp;nbsp;&lt;/p&gt;

        &lt;p&gt;
        For more details visit &lt;a href='https://cis-india.org/internet-governance/blog/unpacking-data-protection-law-a-visual-representation'&gt;https://cis-india.org/internet-governance/blog/unpacking-data-protection-law-a-visual-representation&lt;/a&gt;
        &lt;/p&gt;
    </description>
    <dc:publisher>No publisher</dc:publisher>
    <dc:creator>amber</dc:creator>
    <dc:rights></dc:rights>

    
        <dc:subject>Data Governance</dc:subject>
    
    
        <dc:subject>Internet Governance</dc:subject>
    
    
        <dc:subject>Data Protection</dc:subject>
    
    
        <dc:subject>Privacy</dc:subject>
    

   <dc:date>2018-02-15T13:22:00Z</dc:date>
   <dc:type>Blog Entry</dc:type>
   </item>




</rdf:RDF>
